diff --git a/apps/api-runtime/src/main/java/com/akto/runtime/Main.java b/apps/api-runtime/src/main/java/com/akto/runtime/Main.java
index e198a16c8f..2ca328ec89 100644
--- a/apps/api-runtime/src/main/java/com/akto/runtime/Main.java
+++ b/apps/api-runtime/src/main/java/com/akto/runtime/Main.java
@@ -453,7 +453,7 @@ public static void initializeRuntime(){
 
     public static void initializeRuntimeHelper() {
         SingleTypeInfoDao.instance.getMCollection().updateMany(Filters.exists("apiCollectionId", false), Updates.set("apiCollectionId", 0));
-        DaoInit.createIndices();
+        // DaoInit.createIndices();
         insertRuntimeFilters();
         try {
             AccountSettingsDao.instance.updateVersion(AccountSettings.API_RUNTIME_VERSION);
diff --git a/apps/database-abstractor/pom.xml b/apps/database-abstractor/pom.xml
index d009928532..f929f5748f 100644
--- a/apps/database-abstractor/pom.xml
+++ b/apps/database-abstractor/pom.xml
@@ -25,23 +25,11 @@
 
-        <dependency>
-            <groupId>de.flapdoodle.embed</groupId>
-            <artifactId>de.flapdoodle.embed.mongo</artifactId>
-            <version>3.2.6</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty</artifactId>
-            <version>6.1.26</version>
-        </dependency>
-
+
         <dependency>
             <groupId>org.apache.struts</groupId>
             <artifactId>struts2-core</artifactId>
-            <version>2.5.30</version>
+            <version>2.5.33</version>
         </dependency>
@@ -54,11 +42,16 @@
             <artifactId>utils</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.akto.apps.mini-testing</groupId>
+            <artifactId>mini-testing</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.struts</groupId>
             <artifactId>struts2-json-plugin</artifactId>
-            <version>2.5.30</version>
+            <version>2.5.33</version>
         </dependency>
diff --git a/apps/database-abstractor/src/main/java/com/akto/action/CodeAnalysisAction.java b/apps/database-abstractor/src/main/java/com/akto/action/CodeAnalysisAction.java
new file mode 100644
index 0000000000..528f42b53a
--- /dev/null
+++ b/apps/database-abstractor/src/main/java/com/akto/action/CodeAnalysisAction.java
@@ -0,0 +1,386 @@
+package com.akto.action;
+
+
+import java.net.URI;
+import java.util.*;
+
+import com.akto.dao.*;
+import com.akto.dto.*;
+import com.akto.dto.type.SingleTypeInfo;
+import com.akto.types.CappedSet;
+import com.opensymphony.xwork2.ActionSupport;
+import org.bson.Document;
+import org.bson.conversions.Bson;
+import org.bson.types.ObjectId;
+
+import com.akto.dao.context.Context;
+import com.akto.dto.type.SingleTypeInfo.SuperType;
+import com.akto.log.LoggerMaker;
+import com.akto.log.LoggerMaker.LogDb;
+import com.mongodb.BasicDBObject;
+import com.mongodb.client.model.Filters;
+import com.mongodb.client.model.UpdateOneModel;
+import com.mongodb.client.model.UpdateOptions;
+import com.mongodb.client.model.Updates;
+import com.mongodb.client.model.WriteModel;
+
+import static com.akto.util.HttpRequestResponseUtils.extractValuesFromPayload;
+import static com.akto.util.HttpRequestResponseUtils.generateSTIsFromPayload;
+
+public class CodeAnalysisAction extends ActionSupport {
+
+    private String projectName;
+    private String repoName;
+    private boolean isLastBatch;
+    private List<CodeAnalysisApi> codeAnalysisApisList;
+    private CodeAnalysisRepo codeAnalysisRepo;
+    public static final int MAX_BATCH_SIZE = 100;
+
+    private static final LoggerMaker loggerMaker = new LoggerMaker(CodeAnalysisAction.class);
+
+    public String syncExtractedAPIs() {
+        String apiCollectionName = projectName + "/" + repoName;
+        loggerMaker.infoAndAddToDb("Syncing code analysis endpoints for collection: " + apiCollectionName, LogDb.DASHBOARD);
+
+        if (codeAnalysisApisList == null) {
+            loggerMaker.errorAndAddToDb("Code analysis api's list is null", LogDb.DASHBOARD);
+            addActionError("Code analysis api's list is null");
+            return ERROR.toUpperCase();
+        }
+
+        if (codeAnalysisRepo == null) {
+            loggerMaker.errorAndAddToDb("Code analysis repo is null", LogDb.DASHBOARD);
+            addActionError("Code analysis repo is null");
+            return ERROR.toUpperCase();
+        }
+
+        // Ensure batch size is not exceeded
+        if (codeAnalysisApisList.size() > MAX_BATCH_SIZE) {
+            String errorMsg = "Code analysis api's sync batch size exceeded. Max Batch size: " + MAX_BATCH_SIZE + " Batch size: " + codeAnalysisApisList.size();
+            loggerMaker.errorAndAddToDb(errorMsg, LogDb.DASHBOARD);
+            addActionError(errorMsg);
+            return ERROR.toUpperCase();
+        }
+
+        // populate code analysis api map
+        Map<String, CodeAnalysisApi> codeAnalysisApisMap = new HashMap<>();
+        for (CodeAnalysisApi codeAnalysisApi: codeAnalysisApisList) {
+            codeAnalysisApisMap.put(codeAnalysisApi.generateCodeAnalysisApisMapKey(), codeAnalysisApi);
+        }
+
+        ApiCollection apiCollection = ApiCollectionsDao.instance.findByName(apiCollectionName);
+        if (apiCollection == null) {
+            apiCollection = new ApiCollection(Context.now(), apiCollectionName, Context.now(), new HashSet<>(), null, 0, false, false);
+            ApiCollectionsDao.instance.insertOne(apiCollection);
+        }
+
+        /*
+         * In some cases it is not possible to determine the type of template url from source code
+         * In such cases, we can use the information from traffic endpoints to match the traffic and source code endpoints
+         *
+         * Eg:
+         * Source code endpoints:
+         * GET /books/STRING -> GET /books/AKTO_TEMPLATE_STR -> GET /books/INTEGER
+         * POST /city/STRING/district/STRING -> POST /city/AKTO_TEMPLATE_STR/district/AKTO_TEMPLATE_STR -> POST /city/STRING/district/INTEGER
+         * Traffic endpoints:
+         * GET /books/INTEGER -> GET /books/AKTO_TEMPLATE_STR
+         * POST /city/STRING/district/INTEGER -> POST /city/AKTO_TEMPLATE_STR/district/AKTO_TEMPLATE_STR
+         */
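The loop that follows keys every traffic endpoint by its AKTO_TEMPLATE_STR form so that a source-code endpoint whose path-parameter type could not be resolved statically can inherit the concrete type observed in traffic. A minimal standalone sketch of that normalise-and-look-up step, not part of the patch itself (the class name, the hard-coded SuperType subset and the sample paths are illustrative assumptions):

import java.util.HashMap;
import java.util.Map;

public class TemplateUrlMatchSketch {

    // Collapse every super-type token (STRING, INTEGER, ...) into one placeholder so
    // "/books/STRING" and "/books/INTEGER" normalise to the same template form.
    static String normalise(String endpoint, String[] superTypes) {
        String out = endpoint;
        for (String type : superTypes) {
            out = out.replace(type, "AKTO_TEMPLATE_STR");
        }
        return out;
    }

    public static void main(String[] args) {
        String[] superTypes = {"STRING", "INTEGER"}; // illustrative subset of SuperType values

        // Traffic endpoints, keyed by their normalised template form.
        Map<String, String> trafficByTemplate = new HashMap<>();
        trafficByTemplate.put(normalise("/books/INTEGER", superTypes), "/books/INTEGER");

        // Source-code endpoint whose path-parameter type could not be resolved statically.
        String codeEndpoint = "/books/STRING";
        String template = normalise(codeEndpoint, superTypes);

        if (template.contains("AKTO_TEMPLATE_STR") && trafficByTemplate.containsKey(template)) {
            // Prefer the concrete type seen in traffic.
            System.out.println("matched -> " + trafficByTemplate.get(template));
        }
    }
}

Running it prints "matched -> /books/INTEGER", mirroring how trafficApiEndpointAktoTemplateStrToOriginalMap is consulted in the code below.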
+
+        List<BasicDBObject> trafficApis = ApiCollectionsDao.fetchEndpointsInCollectionUsingHost(apiCollection.getId(), 0, -1, 60 * 24 * 60 * 60);
+        Map<String, String> trafficApiEndpointAktoTemplateStrToOriginalMap = new HashMap<>();
+        List<String> trafficApiKeys = new ArrayList<>();
+        for (BasicDBObject trafficApi: trafficApis) {
+            BasicDBObject trafficApiApiInfoKey = (BasicDBObject) trafficApi.get("_id");
+            String trafficApiMethod = trafficApiApiInfoKey.getString("method");
+            String trafficApiUrl = trafficApiApiInfoKey.getString("url");
+            String trafficApiEndpoint = "";
+
+            // extract path name from url
+            try {
+                // Directly parse the trafficApiUrl as a URI
+                URI uri = new URI(trafficApiUrl);
+                trafficApiEndpoint = uri.getPath();
+
+                // Decode any percent-encoded characters in the path
+                trafficApiEndpoint = java.net.URLDecoder.decode(trafficApiEndpoint, "UTF-8");
+
+            } catch (Exception e) {
+                loggerMaker.errorAndAddToDb("Error parsing URI: " + trafficApiUrl, LogDb.DASHBOARD);
+                continue;
+            }
+
+
+            // Ensure endpoint doesn't end with a slash
+            if (trafficApiEndpoint.length() > 1 && trafficApiEndpoint.endsWith("/")) {
+                trafficApiEndpoint = trafficApiEndpoint.substring(0, trafficApiEndpoint.length() - 1);
+            }
+
+            String trafficApiKey = trafficApiMethod + " " + trafficApiEndpoint;
+            trafficApiKeys.add(trafficApiKey);
+
+            String trafficApiEndpointAktoTemplateStr = trafficApiEndpoint;
+
+            for (SuperType type : SuperType.values()) {
+                // Replace each occurrence of Akto template url format with "AKTO_TEMPLATE_STRING"
+                trafficApiEndpointAktoTemplateStr = trafficApiEndpointAktoTemplateStr.replace(type.name(), "AKTO_TEMPLATE_STR");
+            }
+
+            trafficApiEndpointAktoTemplateStrToOriginalMap.put(trafficApiEndpointAktoTemplateStr, trafficApiEndpoint);
+        }
+
+        Map<String, CodeAnalysisApi> tempCodeAnalysisApisMap = new HashMap<>(codeAnalysisApisMap);
+        for (Map.Entry<String, CodeAnalysisApi> codeAnalysisApiEntry: codeAnalysisApisMap.entrySet()) {
+            String codeAnalysisApiKey = codeAnalysisApiEntry.getKey();
+            CodeAnalysisApi codeAnalysisApi = codeAnalysisApiEntry.getValue();
+
+            String
codeAnalysisApiEndpoint = codeAnalysisApi.getEndpoint(); + + String codeAnalysisApiEndpointAktoTemplateStr = codeAnalysisApiEndpoint; + + for (SuperType type : SuperType.values()) { + // Replace each occurrence of Akto template url format with "AKTO_TEMPLATE_STRING" + codeAnalysisApiEndpointAktoTemplateStr = codeAnalysisApiEndpointAktoTemplateStr.replace(type.name(), "AKTO_TEMPLATE_STR"); + } + + if(codeAnalysisApiEndpointAktoTemplateStr.contains("AKTO_TEMPLATE_STR") && trafficApiEndpointAktoTemplateStrToOriginalMap.containsKey(codeAnalysisApiEndpointAktoTemplateStr)) { + CodeAnalysisApi newCodeAnalysisApi = new CodeAnalysisApi( + codeAnalysisApi.getMethod(), + trafficApiEndpointAktoTemplateStrToOriginalMap.get(codeAnalysisApiEndpointAktoTemplateStr), + codeAnalysisApi.getLocation(), codeAnalysisApi.getRequestBody(), codeAnalysisApi.getResponseBody()); + + tempCodeAnalysisApisMap.remove(codeAnalysisApiKey); + tempCodeAnalysisApisMap.put(newCodeAnalysisApi.generateCodeAnalysisApisMapKey(), newCodeAnalysisApi); + } + } + + + /* + * Match endpoints between traffic and source code endpoints, when only method is different + * Eg: + * Source code endpoints: + * POST /books + * Traffic endpoints: + * PUT /books + * Add PUT /books to source code endpoints + */ + for(String trafficApiKey: trafficApiKeys) { + if (!codeAnalysisApisMap.containsKey(trafficApiKey)) { + for(Map.Entry codeAnalysisApiEntry: tempCodeAnalysisApisMap.entrySet()) { + CodeAnalysisApi codeAnalysisApi = codeAnalysisApiEntry.getValue(); + String codeAnalysisApiEndpoint = codeAnalysisApi.getEndpoint(); + + String trafficApiMethod = "", trafficApiEndpoint = ""; + try { + String[] trafficApiKeyParts = trafficApiKey.split(" "); + trafficApiMethod = trafficApiKeyParts[0]; + trafficApiEndpoint = trafficApiKeyParts[1]; + } catch (Exception e) { + loggerMaker.errorAndAddToDb("Error parsing traffic API key: " + trafficApiKey, LogDb.DASHBOARD); + continue; + } + + if (codeAnalysisApiEndpoint.equals(trafficApiEndpoint)) { + CodeAnalysisApi newCodeAnalysisApi = new CodeAnalysisApi( + trafficApiMethod, + trafficApiEndpoint, + codeAnalysisApi.getLocation(), codeAnalysisApi.getRequestBody(), codeAnalysisApi.getResponseBody()); + + tempCodeAnalysisApisMap.put(newCodeAnalysisApi.generateCodeAnalysisApisMapKey(), newCodeAnalysisApi); + break; + } + } + } + } + + codeAnalysisApisMap = tempCodeAnalysisApisMap; + + ObjectId codeAnalysisCollectionId = null; + try { + // ObjectId for new code analysis collection + codeAnalysisCollectionId = new ObjectId(); + + String projectDir = projectName + "/" + repoName; //todo: + + CodeAnalysisCollection codeAnalysisCollection = CodeAnalysisCollectionDao.instance.updateOne( + Filters.eq("codeAnalysisCollectionName", apiCollectionName), + Updates.combine( + Updates.setOnInsert(CodeAnalysisCollection.ID, codeAnalysisCollectionId), + Updates.setOnInsert(CodeAnalysisCollection.NAME, apiCollectionName), + Updates.set(CodeAnalysisCollection.PROJECT_DIR, projectDir), + Updates.setOnInsert(CodeAnalysisCollection.API_COLLECTION_ID, apiCollection.getId()) + ) + ); + + // Set code analysis collection id if existing collection is updated + if (codeAnalysisCollection != null) { + codeAnalysisCollectionId = codeAnalysisCollection.getId(); + } + } catch (Exception e) { + loggerMaker.errorAndAddToDb("Error updating code analysis collection: " + apiCollectionName + " Error: " + e.getMessage(), LogDb.DASHBOARD); + addActionError("Error syncing code analysis collection: " + apiCollectionName); + return ERROR.toUpperCase(); + } + + 
int now = Context.now(); + + if (codeAnalysisCollectionId != null) { + List> bulkUpdates = new ArrayList<>(); + List> bulkUpdatesSTI = new ArrayList<>(); + + for(Map.Entry codeAnalysisApiEntry: codeAnalysisApisMap.entrySet()) { + CodeAnalysisApi codeAnalysisApi = codeAnalysisApiEntry.getValue(); + CodeAnalysisApiInfo.CodeAnalysisApiInfoKey codeAnalysisApiInfoKey = new CodeAnalysisApiInfo.CodeAnalysisApiInfoKey(codeAnalysisCollectionId, codeAnalysisApi.getMethod(), codeAnalysisApi.getEndpoint()); + + bulkUpdates.add( + new UpdateOneModel<>( + Filters.eq(CodeAnalysisApiInfo.ID, codeAnalysisApiInfoKey), + Updates.combine( + Updates.setOnInsert(CodeAnalysisApiInfo.ID, codeAnalysisApiInfoKey), + Updates.set(CodeAnalysisApiInfo.LOCATION, codeAnalysisApi.getLocation()), + Updates.setOnInsert(CodeAnalysisApiInfo.DISCOVERED_TS, now), + Updates.set(CodeAnalysisApiInfo.LAST_SEEN_TS, now) + ), + new UpdateOptions().upsert(true) + ) + ); + + String requestBody = codeAnalysisApi.getRequestBody(); + String responseBody = codeAnalysisApi.getResponseBody(); + + List singleTypeInfos = new ArrayList<>(); + singleTypeInfos.addAll(generateSTIsFromPayload(apiCollection.getId(), codeAnalysisApi.getEndpoint(), codeAnalysisApi.getMethod(), requestBody, -1)); + singleTypeInfos.addAll(generateSTIsFromPayload(apiCollection.getId(), codeAnalysisApi.getEndpoint(), codeAnalysisApi.getMethod(), responseBody, 200)); + + Bson update = Updates.combine(Updates.max(SingleTypeInfo.LAST_SEEN, now), Updates.setOnInsert("timestamp", now)); + + for (SingleTypeInfo singleTypeInfo: singleTypeInfos) { + bulkUpdatesSTI.add( + new UpdateOneModel<>( + SingleTypeInfoDao.createFilters(singleTypeInfo), + update, + new UpdateOptions().upsert(true) + ) + ); + } + + } + + if (!bulkUpdatesSTI.isEmpty()) { + CodeAnalysisSingleTypeInfoDao.instance.getMCollection().bulkWrite(bulkUpdatesSTI); + } + + if (bulkUpdates.size() > 0) { + try { + CodeAnalysisApiInfoDao.instance.getMCollection().bulkWrite(bulkUpdates); + } catch (Exception e) { + loggerMaker.errorAndAddToDb("Error updating code analysis api infos: " + apiCollectionName + " Error: " + e.getMessage(), LogDb.DASHBOARD); + addActionError("Error syncing code analysis collection: " + apiCollectionName); + return ERROR.toUpperCase(); + } + } + } + + loggerMaker.infoAndAddToDb("Updated code analysis collection: " + apiCollectionName, LogDb.DASHBOARD); + loggerMaker.infoAndAddToDb("Source code endpoints count: " + codeAnalysisApisMap.size(), LogDb.DASHBOARD); + + if (isLastBatch) {//Remove scheduled state from codeAnalysisRepo + Bson sourceCodeFilter; + if (this.codeAnalysisRepo.getSourceCodeType() == CodeAnalysisRepo.SourceCodeType.BITBUCKET) { + sourceCodeFilter = Filters.or( + Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, this.codeAnalysisRepo.getSourceCodeType()), + Filters.exists(CodeAnalysisRepo.SOURCE_CODE_TYPE, false) + + ); + } else { + sourceCodeFilter = Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, this.codeAnalysisRepo.getSourceCodeType()); + } + + Bson filters = Filters.and( + Filters.eq(CodeAnalysisRepo.REPO_NAME, this.codeAnalysisRepo.getRepoName()), + Filters.eq(CodeAnalysisRepo.PROJECT_NAME, this.codeAnalysisRepo.getProjectName()), + sourceCodeFilter + ); + + CodeAnalysisRepoDao.instance.updateOneNoUpsert(filters, Updates.set(CodeAnalysisRepo.LAST_RUN, Context.now())); + loggerMaker.infoAndAddToDb("Updated last run for project:" + codeAnalysisRepo.getProjectName() + " repo:" + codeAnalysisRepo.getRepoName(), LogDb.DASHBOARD); + } + + return SUCCESS.toUpperCase(); + } + + public 
List getCodeAnalysisApisList() { + return codeAnalysisApisList; + } + + public void setCodeAnalysisApisList(List codeAnalysisApisList) { + this.codeAnalysisApisList = codeAnalysisApisList; + } + + + List reposToRun = new ArrayList<>(); + public String updateRepoLastRun() { + Bson sourceCodeFilter; + if (codeAnalysisRepo == null) { + loggerMaker.errorAndAddToDb("Code analysis repo is null", LogDb.DASHBOARD); + addActionError("Code analysis repo is null"); + return ERROR.toUpperCase(); + } + + if (this.codeAnalysisRepo.getSourceCodeType() == CodeAnalysisRepo.SourceCodeType.BITBUCKET) { + sourceCodeFilter = Filters.or( + Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, this.codeAnalysisRepo.getSourceCodeType()), + Filters.exists(CodeAnalysisRepo.SOURCE_CODE_TYPE, false) + + ); + } else { + sourceCodeFilter = Filters.eq(CodeAnalysisRepo.SOURCE_CODE_TYPE, this.codeAnalysisRepo.getSourceCodeType()); + } + + Bson filters = Filters.and( + Filters.eq(CodeAnalysisRepo.REPO_NAME, this.codeAnalysisRepo.getRepoName()), + Filters.eq(CodeAnalysisRepo.PROJECT_NAME, this.codeAnalysisRepo.getProjectName()), + sourceCodeFilter + ); + + CodeAnalysisRepoDao.instance.updateOneNoUpsert(filters, Updates.set(CodeAnalysisRepo.LAST_RUN, Context.now())); + loggerMaker.infoAndAddToDb("Updated last run for project:" + codeAnalysisRepo.getProjectName() + " repo:" + codeAnalysisRepo.getRepoName(), LogDb.DASHBOARD); + return SUCCESS.toUpperCase(); + } + public String findReposToRun() { + reposToRun = CodeAnalysisRepoDao.instance.findAll( + Filters.expr( + Document.parse("{ $gt: [ \"$" + CodeAnalysisRepo.SCHEDULE_TIME + "\", \"$" + CodeAnalysisRepo.LAST_RUN + "\" ] }") + ) + ); + return SUCCESS.toUpperCase(); + } + + public List getReposToRun() { + return reposToRun; + } + + public void setRepoName(String repoName) { + this.repoName = repoName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public boolean getIsLastBatch() { + return isLastBatch; + } + + public void setIsLastBatch(boolean isLastBatch) { + this.isLastBatch = isLastBatch; + } + + public CodeAnalysisRepo getCodeAnalysisRepo() { + return codeAnalysisRepo; + } + + public void setCodeAnalysisRepo(CodeAnalysisRepo codeAnalysisRepo) { + this.codeAnalysisRepo = codeAnalysisRepo; + } +} \ No newline at end of file diff --git a/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java b/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java index 0f29ad65a8..8ccf15ff29 100644 --- a/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java +++ b/apps/database-abstractor/src/main/java/com/akto/action/DbAction.java @@ -2,7 +2,9 @@ import com.akto.dao.*; import com.akto.dao.context.Context; -import com.akto.dao.settings.DataControlSettingsDao; +import com.akto.dao.test_editor.YamlTemplateDao; +import com.akto.dao.traffic_collector.TrafficCollectorInfoDao; +import com.akto.dao.traffic_collector.TrafficCollectorMetricsDao; import com.akto.data_actor.DbLayer; import com.akto.dto.*; import com.akto.dto.ApiInfo.ApiInfoKey; @@ -10,6 +12,7 @@ import com.akto.dto.billing.Tokens; import com.akto.dto.bulk_updates.BulkUpdates; import com.akto.dto.bulk_updates.UpdatePayload; +import com.akto.dto.dependency_flow.Node; import com.akto.dto.filter.MergedUrls; import com.akto.dto.runtime_filters.RuntimeFilter; import com.akto.dto.settings.DataControlSettings; @@ -20,37 +23,49 @@ import com.akto.dto.testing.config.TestScript; import com.akto.dto.testing.sources.TestSourceConfig; import 
com.akto.dto.traffic.SampleData; +import com.akto.dto.traffic.SuspectSampleData; import com.akto.dto.traffic.TrafficInfo; +import com.akto.dto.traffic_collector.TrafficCollectorMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.akto.dto.type.SingleTypeInfo; +import com.akto.notifications.slack.APITestStatusAlert; +import com.akto.notifications.slack.NewIssuesModel; +import com.akto.notifications.slack.SlackAlerts; +import com.akto.notifications.slack.SlackSender; +import com.akto.util.enums.GlobalEnums; +import com.akto.utils.CustomAuthUtil; import com.akto.utils.KafkaUtils; +import com.akto.utils.RedactAlert; +import com.akto.utils.SampleDataLogs; import com.akto.dto.type.URLMethods; import com.akto.dto.type.URLMethods.Method; +import com.akto.testing.TestExecutor; +import com.akto.trafficFilter.HostFilter; +import com.akto.trafficFilter.ParamFilter; +import com.akto.usage.UsageMetricCalculator; +import com.akto.util.Constants; import com.akto.dto.usage.MetricTypes; +import com.akto.log.LoggerMaker; +import com.akto.log.LoggerMaker.LogDb; import com.akto.util.enums.GlobalEnums.TestErrorSource; +import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.opensymphony.xwork2.Action; import com.opensymphony.xwork2.ActionSupport; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; import com.mongodb.client.model.*; -import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; - import com.google.gson.Gson; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; public class DbAction extends ActionSupport { - + static final ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor(); long count; List customDataTypes; List aktoDataTypes; @@ -75,9 +90,12 @@ public class DbAction extends ActionSupport { List writesForTrafficInfo; List writesForTrafficMetrics; List writesForTestingRunIssues; + List writesForSuspectSampleData; List dependencyNodeList; TestScript testScript; + private static final LoggerMaker loggerMaker = new LoggerMaker(DbAction.class, LogDb.DB_ABS); + public List getWritesForTestingRunIssues() { return writesForTestingRunIssues; } @@ -137,6 +155,7 @@ public void setTestSourceConfig(TestSourceConfig testSourceConfig) { String logicalGroupName; BasicDBList issuesIds; List activeAdvancedFilters; + Set mergedUrls; List currentlyRunningTests; String state; Bson filter; @@ -199,7 +218,7 @@ public void setIssuesIds(BasicDBList issuesIds) { TestRoles testRole; List testRoles; Map testingRunResultSummaryMap; - TestingRunResult testingRunResult; + BasicDBObject testingRunResult; Tokens token; WorkflowTest workflowTest; List yamlTemplates; @@ -207,7 +226,7 @@ public void setIssuesIds(BasicDBList issuesIds) { int scheduleTs; private static final Gson gson = new Gson(); - ObjectMapper objectMapper = new ObjectMapper(); + ObjectMapper objectMapper = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, false).configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); KafkaUtils kafkaUtils = new KafkaUtils(); String endpointLogicalGroupId; 
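The ObjectMapper above is now built with Jackson's strict deserialization checks relaxed, so JSON produced via BasicDBObject.toJson() can carry fields or subtype hints that the DTOs do not declare without failing the whole request. A minimal sketch of that behaviour, assuming a hypothetical Dto class standing in for DTOs such as ApiInfo:

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LenientMapperSketch {

    // Stand-in for a DTO; "name" is the only property it knows about.
    public static class Dto {
        public String name;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper lenient = new ObjectMapper()
                .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
                .configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, false)
                .configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false);

        // "extra" is silently dropped instead of raising UnrecognizedPropertyException,
        // which is what the stricter default mapper would do.
        Dto dto = lenient.readValue("{\"name\":\"login\",\"extra\":42}", Dto.class);
        System.out.println(dto.name); // login
    }
}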
String vpcId; @@ -233,6 +252,20 @@ public void setDeltaUsage(int deltaUsage) { } DataControlSettings dataControlSettings; + BasicDBList metricsData; + + int deltaPeriodValue; + String uuid; + int currTime; + OtpTestData otpTestData; + RecordedLoginFlowInput recordedLoginFlowInput; + LoginFlowStepsData loginFlowStepsData; + int userId; + Map valuesMap; + Node node; + List nodes; + boolean removeZeroLevel; + public String fetchDataControlSettings() { try { String prevCommand = ""; @@ -243,6 +276,7 @@ public String fetchDataControlSettings() { } dataControlSettings = DbLayer.fetchDataControlSettings(prevResult, prevCommand); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchDataControlSettings " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -257,6 +291,7 @@ public String fetchCustomDataTypes() { } customDataTypes = customDataTypeMappers; } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchCustomDataTypes " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -266,6 +301,7 @@ public String fetchAktoDataTypes() { try { aktoDataTypes = DbLayer.fetchAktoDataTypes(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchAktoDataTypes " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -280,6 +316,7 @@ public String fetchCustomAuthTypes() { } customAuthTypes = customAuthTypeMappers; } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchCustomAuthTypes " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -289,6 +326,7 @@ public String updateApiCollectionNameForVxlan() { try { DbLayer.updateApiCollectionName(vxlanId, name); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in updateApiCollectionNameForVxlan " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -298,6 +336,7 @@ public String updateCidrList() { try { DbLayer.updateCidrList(cidrList); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in updateCidrList " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -308,6 +347,7 @@ public String fetchAccountSettings() { int accountId = Context.accountId.get(); accountSettings = DbLayer.fetchAccountSettings(accountId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchAccountSettings " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -317,6 +357,7 @@ public String fetchApiInfos() { try { apiInfos = DbLayer.fetchApiInfos(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchApiInfos " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -326,42 +367,177 @@ public String fetchNonTrafficApiInfos() { try { apiInfos = DbLayer.fetchNonTrafficApiInfos(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in fetchNonTrafficApiInfos " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); } + + TrafficCollectorMetrics trafficCollectorMetrics = null; + public String updateTrafficCollectorMetrics() { + if (trafficCollectorMetrics == null) { + loggerMaker.errorAndAddToDb("trafficCollectorMetrics is null"); + return Action.SUCCESS.toUpperCase(); + } + + // update heartbeat + try { + TrafficCollectorInfoDao.instance.updateHeartbeat(trafficCollectorMetrics.getId(), 
trafficCollectorMetrics.getRuntimeId()); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error while updating heartbeat: " + e); + } + + // update metrics + try { + TrafficCollectorMetricsDao.instance.updateCount(trafficCollectorMetrics); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error while updating count of traffic collector metrics: " + e); + } + + return Action.SUCCESS.toUpperCase(); + } + public String bulkWriteApiInfo() { + int accountId = Context.accountId.get(); try { List apiInfos = new ArrayList<>(); for (BasicDBObject obj: apiInfoList) { ApiInfo apiInfo = objectMapper.readValue(obj.toJson(), ApiInfo.class); + ApiInfoKey id = apiInfo.getId(); + if (UsageMetricCalculator.getDeactivated().contains(id.getApiCollectionId())) { + continue; + } + if (URLMethods.Method.OPTIONS.equals(id.getMethod()) || URLMethods.Method.OTHER.equals(id.getMethod())) { + continue; + } + if (accountId == 1721887185 && (id.getApiCollectionId() == 1991121043 || id.getApiCollectionId() == -1134993740) && !id.getMethod().equals(Method.OPTIONS)) { + loggerMaker.infoAndAddToDb("auth types for endpoint from runtime " + id.getUrl() + " " + id.getMethod() + " : " + apiInfo.getAllAuthTypesFound()); + } apiInfos.add(apiInfo); } - DbLayer.bulkWriteApiInfo(apiInfos); + if (apiInfos!=null && !apiInfos.isEmpty()) { + SingleTypeInfo.fetchCustomAuthTypes(accountId); + service.schedule(new Runnable() { + public void run() { + Context.accountId.set(accountId); + List customAuthTypes = SingleTypeInfo.getCustomAuthType(accountId); + CustomAuthUtil.calcAuth(apiInfos, customAuthTypes, accountId == 1721887185); + DbLayer.bulkWriteApiInfo(apiInfos); + } + }, 0, TimeUnit.SECONDS); + } } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in bulkWriteApiInfo " + e.toString()); + if (kafkaUtils.isWriteEnabled()) { + kafkaUtils.insertDataSecondary(apiInfoList, "bulkWriteApiInfo", Context.accountId.get()); + } return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); } public String bulkWriteSti() { - System.out.println("bulkWriteSti called"); + loggerMaker.infoAndAddToDb("bulkWriteSti called"); + int accId = Context.accountId.get(); + + Set ignoreHosts = new HashSet<>(); + try { + ignoreHosts = HostFilter.getCollectionSet(accId); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in getting ignore host ids " + e.toString()); + } + if (ignoreHosts == null) { + ignoreHosts = new HashSet<>(); + } if (kafkaUtils.isWriteEnabled()) { - int accId = Context.accountId.get(); - kafkaUtils.insertData(writesForSti, "bulkWriteSti", accId); + + try { + Set indicesToDelete = new HashSet<>(); + int i = 0; + for (BulkUpdates bulkUpdate : writesForSti) { + boolean ignore = false; + int apiCollectionId = -1; + String url = null, method = null, param = null; + for (Map.Entry entry : bulkUpdate.getFilters().entrySet()) { + if (entry.getKey().equalsIgnoreCase(SingleTypeInfo._API_COLLECTION_ID)) { + String valStr = entry.getValue().toString(); + int val = Integer.valueOf(valStr); + apiCollectionId = val; + if (ignoreHosts.contains(val)) { + ignore = true; + } + if(UsageMetricCalculator.getDeactivated().contains(apiCollectionId)){ + ignore = true; + } + } else if(entry.getKey().equalsIgnoreCase(SingleTypeInfo._URL)){ + url = entry.getValue().toString(); + } else if(entry.getKey().equalsIgnoreCase(SingleTypeInfo._METHOD)){ + method = entry.getValue().toString(); + if ("OPTIONS".equals(method) || "CONNECT".equals(method)) { + ignore = true; + } + } else 
if(entry.getKey().equalsIgnoreCase(SingleTypeInfo._PARAM)){ + param = entry.getValue().toString(); + } + } + if (!ignore && apiCollectionId != -1 && url != null && method != null && param!=null) { + boolean isNew = ParamFilter.isNewEntry(accId, apiCollectionId, url, method, param); + if (!isNew) { + ignore = true; + } + } + if(ignore){ + indicesToDelete.add(i); + } + i++; + } + + if (writesForSti != null && !writesForSti.isEmpty() && + indicesToDelete != null && !indicesToDelete.isEmpty()) { + int size = writesForSti.size(); + List tempWrites = new ArrayList<>(); + for (int index = 0; index < size; index++) { + if (indicesToDelete.contains(index)) { + continue; + } + tempWrites.add(writesForSti.get(index)); + } + writesForSti = tempWrites; + int newSize = writesForSti.size(); + loggerMaker.infoAndAddToDb(String.format("Original writes: %d Final writes: %d", size, newSize)); + } + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in ignore STI updates " + e.toString()); + e.printStackTrace(); + } + + if (writesForSti != null && !writesForSti.isEmpty()) { + kafkaUtils.insertData(writesForSti, "bulkWriteSti", accId); + } + } else { - System.out.println("Entering writes size: " + writesForSti.size()); + loggerMaker.infoAndAddToDb("Entering writes size: " + writesForSti.size()); try { ArrayList> writes = new ArrayList<>(); + int ignoreCount =0; for (BulkUpdates bulkUpdate: writesForSti) { List filters = new ArrayList<>(); + boolean ignore = false; for (Map.Entry entry : bulkUpdate.getFilters().entrySet()) { if (entry.getKey().equalsIgnoreCase("isUrlParam")) { continue; } - if (entry.getKey().equalsIgnoreCase("apiCollectionId") || entry.getKey().equalsIgnoreCase("responseCode")) { + if (entry.getKey().equalsIgnoreCase("apiCollectionId")) { + String valStr = entry.getValue().toString(); + int val = Integer.valueOf(valStr); + if (ignoreHosts.contains(val)) { + ignore = true; + break; + } + filters.add(Filters.eq(entry.getKey(), val)); + } else if (entry.getKey().equalsIgnoreCase("responseCode")) { String valStr = entry.getValue().toString(); int val = Integer.valueOf(valStr); filters.add(Filters.eq(entry.getKey(), val)); @@ -369,6 +545,10 @@ public String bulkWriteSti() { filters.add(Filters.eq(entry.getKey(), entry.getValue())); } } + if (ignore) { + ignoreCount++; + continue; + } List urlParamQuery; if ((Boolean) bulkUpdate.getFilters().get("isUrlParam") == true) { urlParamQuery = Collections.singletonList(true); @@ -404,8 +584,6 @@ public String bulkWriteSti() { } } - System.out.println("filters: " + filters.toString()); - if (isDeleteWrite) { writes.add( new DeleteOneModel<>(Filters.and(filters), new DeleteOptions()) @@ -416,18 +594,14 @@ public String bulkWriteSti() { ); } } + + loggerMaker.infoAndAddToDb(String.format("Consumer data: %d ignored: %d writes: %d", writesForSti.size(), ignoreCount, writes.size())); - DbLayer.bulkWriteSingleTypeInfo(writes); - } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); + if (writes != null && !writes.isEmpty()) { + DbLayer.bulkWriteSingleTypeInfo(writes); } - System.out.println(err); + } catch (Exception e) { + 
loggerMaker.errorAndAddToDb(e, "Error in bulkWriteSti " + e.toString()); return Action.ERROR.toUpperCase(); } } @@ -435,18 +609,22 @@ public String bulkWriteSti() { } public String bulkWriteSampleData() { + int accId = Context.accountId.get(); if (kafkaUtils.isWriteEnabled()) { - int accId = Context.accountId.get(); kafkaUtils.insertData(writesForSampleData, "bulkWriteSampleData", accId); } else { try { - System.out.println("called"); + loggerMaker.infoAndAddToDb("bulkWriteSampleData called"); ArrayList> writes = new ArrayList<>(); for (BulkUpdates bulkUpdate: writesForSampleData) { Map mObj = (Map) bulkUpdate.getFilters().get("_id"); String apiCollectionIdStr = mObj.get("apiCollectionId").toString(); int apiCollectionId = Integer.valueOf(apiCollectionIdStr); + if(UsageMetricCalculator.getDeactivated().contains(apiCollectionId)){ + continue; + } + String bucketEndEpochStr = mObj.get("bucketEndEpoch").toString(); int bucketEndEpoch = Integer.valueOf(bucketEndEpochStr); @@ -455,14 +633,22 @@ public String bulkWriteSampleData() { String responseCodeStr = mObj.get("responseCode").toString(); int responseCode = Integer.valueOf(responseCodeStr); - + + String url = (String) mObj.get("url"); + String method = (String) mObj.get("method"); + + if ("OPTIONS".equals(method) || "CONNECT".equals(method)) { + continue; + } + Bson filters = Filters.and(Filters.eq("_id.apiCollectionId", apiCollectionId), Filters.eq("_id.bucketEndEpoch", bucketEndEpoch), Filters.eq("_id.bucketStartEpoch", bucketStartEpoch), - Filters.eq("_id.method", mObj.get("method")), + Filters.eq("_id.method", method), Filters.eq("_id.responseCode", responseCode), - Filters.eq("_id.url", mObj.get("url"))); + Filters.eq("_id.url", url)); List updatePayloadList = bulkUpdate.getUpdates(); + SampleDataLogs.printLog(apiCollectionId, method, url); List updates = new ArrayList<>(); for (String payload: updatePayloadList) { @@ -476,6 +662,12 @@ public String bulkWriteSampleData() { val.add(dVal.get(i).intValue()); } updates.add(Updates.setOnInsert(field, val)); + } else if(field.equals(SampleData.SAMPLES)){ + List dVal = (List) json.get("val"); + RedactAlert.submitSampleDataForChecking(dVal, apiCollectionId, method, url); + SampleDataLogs.insertCount(apiCollectionId, method, url, dVal.size()); + updatePayload = new UpdatePayload((String) json.get("field"), dVal , (String) json.get("op")); + updates.add(Updates.pushEach(updatePayload.getField(), dVal, new PushOptions().slice(-10))); } else { List dVal = (List) json.get("val"); updatePayload = new UpdatePayload((String) json.get("field"), dVal , (String) json.get("op")); @@ -486,17 +678,12 @@ public String bulkWriteSampleData() { new UpdateOneModel<>(filters, Updates.combine(updates), new UpdateOptions().upsert(true)) ); } - DbLayer.bulkWriteSampleData(writes); - } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); + if(writes!=null && !writes.isEmpty()){ + DbLayer.bulkWriteSampleData(writes); } - System.out.println(err); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteSampleData " + e.toString()); + e.printStackTrace(); return Action.ERROR.toUpperCase(); } 
} @@ -504,24 +691,50 @@ public String bulkWriteSampleData() { } public String bulkWriteSensitiveSampleData() { + int accId = Context.accountId.get(); if (kafkaUtils.isWriteEnabled()) { - int accId = Context.accountId.get(); kafkaUtils.insertData(writesForSensitiveSampleData, "bulkWriteSensitiveSampleData", accId); } else { try { - System.out.println("bulkWriteSensitiveSampleData called"); + loggerMaker.infoAndAddToDb("bulkWriteSensitiveSampleData called"); ArrayList> writes = new ArrayList<>(); for (BulkUpdates bulkUpdate: writesForSensitiveSampleData) { Bson filters = Filters.empty(); + int apiCollectionId = 0; + boolean ignore = false; for (Map.Entry entry : bulkUpdate.getFilters().entrySet()) { - if (entry.getKey().equalsIgnoreCase("_id.apiCollectionId") || entry.getKey().equalsIgnoreCase("_id.responseCode")) { + if (entry.getKey().equalsIgnoreCase("_id.apiCollectionId") ) { + String valStr = entry.getValue().toString(); + int val = Integer.valueOf(valStr); + apiCollectionId = val; + if(UsageMetricCalculator.getDeactivated().contains(apiCollectionId)){ + ignore = true; + break; + } + filters = Filters.and(filters, Filters.eq(entry.getKey(), val)); + } else if(entry.getKey().equalsIgnoreCase("_id.responseCode")) { String valStr = entry.getValue().toString(); int val = Integer.valueOf(valStr); filters = Filters.and(filters, Filters.eq(entry.getKey(), val)); } else { filters = Filters.and(filters, Filters.eq(entry.getKey(), entry.getValue())); + try { + String key = entry.getKey(); + String value = (String) entry.getValue(); + if ("_id.method".equals(key) + && ("OPTIONS".equals(value) || "CONNECT".equals(value))) { + ignore = true; + break; + } + } catch (Exception e){ + } } } + + if(ignore){ + continue; + } + List updatePayloadList = bulkUpdate.getUpdates(); boolean isDeleteWrite = false; @@ -555,22 +768,17 @@ public String bulkWriteSensitiveSampleData() { new DeleteOneModel<>(filters, new DeleteOptions()) ); } else { + RedactAlert.submitSensitiveSampleDataCall(apiCollectionId); writes.add( new UpdateOneModel<>(filters, Updates.combine(updates), new UpdateOptions().upsert(true)) ); } } - DbLayer.bulkWriteSensitiveSampleData(writes); - } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); + if(writes!=null && !writes.isEmpty()){ + DbLayer.bulkWriteSensitiveSampleData(writes); } - System.out.println(err); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteSensitiveSampleData " + e.toString()); return Action.ERROR.toUpperCase(); } } @@ -580,10 +788,10 @@ public String bulkWriteSensitiveSampleData() { public String bulkWriteTrafficInfo() { if (kafkaUtils.isWriteEnabled()) { int accId = Context.accountId.get(); - kafkaUtils.insertData(writesForTrafficInfo, "bulkWriteTrafficInfo", accId); + kafkaUtils.insertDataTraffic(writesForTrafficInfo, "bulkWriteTrafficInfo", accId); } else { try { - System.out.println("bulkWriteTrafficInfo called"); + loggerMaker.infoAndAddToDb("bulkWriteTrafficInfo called"); ArrayList> writes = new ArrayList<>(); for (BulkUpdates bulkUpdate: writesForTrafficInfo) { Bson filters = Filters.eq("_id", bulkUpdate.getFilters().get("_id")); 
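In the bulkWriteSampleData path above, sample payloads are now pushed with a slice so each endpoint keeps only its most recent entries instead of growing without bound. A self-contained sketch of that capped upsert pattern with the MongoDB Java driver (the filter values and the "samples" field name are illustrative assumptions, not taken from the patch):

import java.util.Arrays;
import java.util.List;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.model.Filters;
import com.mongodb.client.model.PushOptions;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.Updates;

public class CappedSamplesUpsertSketch {
    public static void main(String[] args) {
        // Filter identifying one (collection, url, method) bucket; the values are illustrative.
        Bson filter = Filters.and(
                Filters.eq("_id.apiCollectionId", 123),
                Filters.eq("_id.url", "/api/books"),
                Filters.eq("_id.method", "GET"));

        List<String> newSamples = Arrays.asList("{\"req\":1}", "{\"req\":2}");

        // pushEach + slice(-10) appends the new samples but retains only the 10 most recent,
        // so the samples array never grows without bound.
        Bson update = Updates.pushEach("samples", newSamples, new PushOptions().slice(-10));

        UpdateOneModel<Document> model =
                new UpdateOneModel<>(filter, update, new UpdateOptions().upsert(true));
        System.out.println(model); // would normally be collected into a list for bulkWrite(...)
    }
}

Each such model would normally be accumulated and handed to a bulkWrite call, as the DbLayer.bulkWriteSampleData path does.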
@@ -614,15 +822,7 @@ public String bulkWriteTrafficInfo() { } DbLayer.bulkWriteTrafficInfo(writes); } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); - } - System.out.println(err); + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteTrafficInfo " + e.toString()); return Action.ERROR.toUpperCase(); } } @@ -632,10 +832,10 @@ public String bulkWriteTrafficInfo() { public String bulkWriteTrafficMetrics() { if (kafkaUtils.isWriteEnabled()) { int accId = Context.accountId.get(); - kafkaUtils.insertData(writesForTrafficMetrics, "bulkWriteTrafficMetrics", accId); + kafkaUtils.insertDataTraffic(writesForTrafficMetrics, "bulkWriteTrafficMetrics", accId); } else { try { - System.out.println("bulkWriteTrafficInfo called"); + loggerMaker.infoAndAddToDb("bulkWriteTrafficInfo called"); ArrayList> writes = new ArrayList<>(); for (BulkUpdates bulkUpdate: writesForTrafficMetrics) { @@ -667,15 +867,7 @@ public String bulkWriteTrafficMetrics() { } DbLayer.bulkWriteTrafficMetrics(writes); } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); - } - System.out.println(err); + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteTrafficMetrics " + e.toString()); return Action.ERROR.toUpperCase(); } } @@ -721,15 +913,7 @@ public String bulkWriteSensitiveParamInfo() { } DbLayer.bulkWriteSensitiveParamInfo(writes); } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); - } - System.out.println(err); + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteSensitiveParamInfo " + e.toString()); return Action.ERROR.toUpperCase(); } } @@ -737,10 +921,6 @@ public String bulkWriteSensitiveParamInfo() { } public String bulkWriteTestingRunIssues() { - if (kafkaUtils.isWriteEnabled()) { - int accId = Context.accountId.get(); - kafkaUtils.insertData(writesForTestingRunIssues, "bulkWriteTestingRunIssues", accId); - } else { try { ArrayList> writes = new ArrayList<>(); for (BulkUpdates bulkUpdate: writesForTestingRunIssues) { @@ -785,6 +965,33 @@ public String bulkWriteTestingRunIssues() { int dVal = val.intValue(); UpdatePayload updatePayload = new UpdatePayload((String) json.get("field"), dVal, (String) json.get("op")); updates.add(Updates.set(updatePayload.getField(), dVal)); + } else if (field.equals(TestingRunIssues.KEY_SEVERITY)) { + + /* + * Fixing severity temp. here, + * cause the info. 
from mini-testing always contains HIGH. + * To be fixed in mini-testing. + */ + /* + * Severity from info. fixed, + * so taking for dynamic_severity, + * since rest would be same and for old deployments. + */ + String testSubCategory = idd.getTestSubCategory(); + YamlTemplate template = YamlTemplateDao.instance + .findOne(Filters.eq(Constants.ID, testSubCategory)); + String dVal = (String) json.get("val"); + + if (template != null) { + String severity = template.getInfo().getSeverity(); + if (severity != null && !"dynamic_severity".equals(severity)) { + dVal = severity; + } + } + + UpdatePayload updatePayload = new UpdatePayload((String) json.get("field"), dVal, + (String) json.get("op")); + updates.add(Updates.set(updatePayload.getField(), dVal)); } else { String dVal = (String) json.get("val"); UpdatePayload updatePayload = new UpdatePayload((String) json.get("field"), dVal, (String) json.get("op")); @@ -798,17 +1005,11 @@ public String bulkWriteTestingRunIssues() { } DbLayer.bulkWriteTestingRunIssues(writes); } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteTestingRunIssues " + e.toString()); + if (kafkaUtils.isWriteEnabled()) { + kafkaUtils.insertDataSecondary(writesForTestingRunIssues, "bulkWriteTestingRunIssues", Context.accountId.get()); } - System.out.println(err); return Action.ERROR.toUpperCase(); - } } return Action.SUCCESS.toUpperCase(); } @@ -817,6 +1018,7 @@ public String findTestSourceConfig(){ try { testSourceConfig = DbLayer.findTestSourceConfig(subType); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findTestSourceConfig " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -826,6 +1028,7 @@ public String fetchApiConfig() { try { apiConfig = DbLayer.fetchApiconfig(configName); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchApiConfig " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -849,6 +1052,7 @@ public String fetchStiBasedOnHostHeaders() { ObjectId lastTsObjectId = lastStiId != null ? 
new ObjectId(lastStiId) : null; stis = DbLayer.fetchStiBasedOnHostHeaders(lastTsObjectId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchStiBasedOnHostHeaders " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -858,6 +1062,7 @@ public String fetchDeactivatedCollections() { try { apiCollectionIds = DbLayer.fetchDeactivatedCollections(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchDeactivatedCollections " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -868,6 +1073,7 @@ public String updateUsage() { MetricTypes metric = MetricTypes.valueOf(metricType); DbLayer.updateUsage(metric, deltaUsage); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateUsage " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -877,13 +1083,18 @@ public String fetchApiCollectionIds() { try { apiCollectionIds = DbLayer.fetchApiCollectionIds(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchApiCollectionIds " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); } public String fetchEstimatedDocCount() { - count = DbLayer.fetchEstimatedDocCount(); + try { + count = DbLayer.fetchEstimatedDocCount(); + } catch (Exception e){ + loggerMaker.errorAndAddToDb(e, "Error in fetchEstimatedDocCount " + e.toString()); + } return Action.SUCCESS.toUpperCase(); } @@ -891,6 +1102,7 @@ public String fetchRuntimeFilters() { try { runtimeFilters = DbLayer.fetchRuntimeFilters(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchRuntimeFilters " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -900,6 +1112,7 @@ public String fetchNonTrafficApiCollectionsIds() { try { apiCollectionIds = DbLayer.fetchNonTrafficApiCollectionsIds(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchNonTrafficApiCollectionsIds " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -909,6 +1122,7 @@ public String fetchStiOfCollections() { try { stis = DbLayer.fetchStiOfCollections(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchStiOfCollections " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -918,6 +1132,7 @@ public String getUnsavedSensitiveParamInfos() { try { sensitiveParamInfos = DbLayer.getUnsavedSensitiveParamInfos(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in getUnsavedSensitiveParamInfos " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -927,6 +1142,7 @@ public String fetchSingleTypeInfo() { try { stis = DbLayer.fetchSingleTypeInfo(lastFetchTimestamp, lastSeenObjectId, resolveLoop); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchSingleTypeInfo " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -936,6 +1152,7 @@ public String fetchAllSingleTypeInfo() { try { stis = DbLayer.fetchAllSingleTypeInfo(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchAllSingleTypeInfo " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -945,6 +1162,7 @@ public String fetchActiveAccount() { try { account = DbLayer.fetchActiveAccount(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in 
fetchActiveAccount " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -954,6 +1172,7 @@ public String updateRuntimeVersion() { try { DbLayer.updateRuntimeVersion(fieldName, version); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateRuntimeVersion " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -963,6 +1182,7 @@ public String updateKafkaIp() { try { DbLayer.updateKafkaIp(currentInstanceIp); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateKafkaIp " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -972,6 +1192,7 @@ public String fetchEndpointsInCollection() { try { endpoints = DbLayer.fetchEndpointsInCollection(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchEndpointsInCollection " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -981,6 +1202,7 @@ public String fetchApiCollections() { try { apiCollections = DbLayer.fetchApiCollections(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchApiCollections " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -990,6 +1212,7 @@ public String createCollectionSimple() { try { DbLayer.createCollectionSimple(vxlanId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in createCollectionSimple " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -999,6 +1222,7 @@ public String createCollectionForHost() { try { DbLayer.createCollectionForHost(host, colId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in createCollectionForHost " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1006,9 +1230,14 @@ public String createCollectionForHost() { public String insertRuntimeLog() { try { + int accId = Context.accountId.get(); + if (accId == 1733164172) { + return Action.SUCCESS.toUpperCase(); + } Log dbLog = new Log(log.getString("log"), log.getString("key"), log.getInt("timestamp")); DbLayer.insertRuntimeLog(dbLog); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertRuntimeLog " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1016,9 +1245,14 @@ public String insertRuntimeLog() { public String insertAnalyserLog() { try { + int accId = Context.accountId.get(); + if (accId == 1733164172) { + return Action.SUCCESS.toUpperCase(); + } Log dbLog = new Log(log.getString("log"), log.getString("key"), log.getInt("timestamp")); DbLayer.insertAnalyserLog(dbLog); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertAnalyserLog " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1028,6 +1262,7 @@ public String modifyHybridSaasSetting() { try { DbLayer.modifyHybridSaasSetting(isHybridSaas); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in modifyHybridSaasSetting " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1037,6 +1272,7 @@ public String fetchSetup() { try { setup = DbLayer.fetchSetup(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchSetup " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1046,6 +1282,7 @@ public String fetchOrganization() { try { organization = 
DbLayer.fetchOrganization(accountId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchOrganization " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1058,6 +1295,7 @@ public String createTRRSummaryIfAbsent() { trrs = DbLayer.createTRRSummaryIfAbsent(testingRunHexId, start); trrs.setTestingRunHexId(trrs.getTestingRunId().toHexString()); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in createTRRSummaryIfAbsent " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1077,6 +1315,7 @@ public String findPendingTestingRun() { } } } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findPendingTestingRun " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1089,6 +1328,7 @@ public String findPendingTestingRunResultSummary() { trrs.setTestingRunHexId(trrs.getTestingRunId().toHexString()); } } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findPendingTestingRunResultSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1098,6 +1338,7 @@ public String findTestingRunConfig() { try { testingRunConfig = DbLayer.findTestingRunConfig(testIdConfig); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findTestingRunConfig " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1107,6 +1348,7 @@ public String findTestingRun() { try { testingRun = DbLayer.findTestingRun(testingRunId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findTestingRun " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1116,6 +1358,7 @@ public String apiInfoExists() { try { exists = DbLayer.apiInfoExists(apiCollectionIds, urls); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in apiInfoExists " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1125,6 +1368,7 @@ public String fetchAccessMatrixUrlToRole() { try { accessMatrixUrlToRole = DbLayer.fetchAccessMatrixUrlToRole(apiInfoKey); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchAccessMatrixUrlToRole " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1134,6 +1378,7 @@ public String fetchAllApiCollectionsMeta() { try { apiCollections = DbLayer.fetchAllApiCollectionsMeta(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchAllApiCollectionsMeta " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1143,6 +1388,7 @@ public String fetchApiCollectionMeta() { try { apiCollection = DbLayer.fetchApiCollectionMeta(apiCollectionId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchApiCollectionMeta " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1152,6 +1398,7 @@ public String fetchApiInfo() { try { apiInfo = DbLayer.fetchApiInfo(apiInfoKey); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchApiInfo " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1161,6 +1408,7 @@ public String fetchEndpointLogicalGroup() { try { endpointLogicalGroup = DbLayer.fetchEndpointLogicalGroup(logicalGroupName); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchEndpointLogicalGroup " + 
e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1170,6 +1418,7 @@ public String fetchEndpointLogicalGroupById() { try { endpointLogicalGroup = DbLayer.fetchEndpointLogicalGroupById(endpointLogicalGroupId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchEndpointLogicalGroupById " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1192,15 +1441,7 @@ public String fetchIssuesByIds() { } testingRunIssues = DbLayer.fetchIssuesByIds(ids); } catch (Exception e) { - String err = "Error: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); - } - System.out.println(err); + loggerMaker.errorAndAddToDb(e, "Error in fetchIssuesByIds " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1210,6 +1451,7 @@ public String fetchLatestTestingRunResult() { try { testingRunResults = DbLayer.fetchLatestTestingRunResult(testingRunResultSummaryId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchLatestTestingRunResult " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1219,6 +1461,7 @@ public String fetchLatestTestingRunResultBySummaryId() { try { testingRunResults = DbLayer.fetchLatestTestingRunResultBySummaryId(summaryId, limit, skip); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchLatestTestingRunResultBySummaryId " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1228,6 +1471,7 @@ public String fetchMatchParamSti() { try { stis = DbLayer.fetchMatchParamSti(apiCollectionId, param); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchMatchParamSti " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1237,6 +1481,7 @@ public String fetchOpenIssues() { try { testingRunIssues = DbLayer.fetchOpenIssues(summaryId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchOpenIssues " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1246,6 +1491,7 @@ public String fetchPendingAccessMatrixInfo() { try { accessMatrixTaskInfos = DbLayer.fetchPendingAccessMatrixInfo(ts); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchPendingAccessMatrixInfo " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1255,6 +1501,7 @@ public String fetchSampleData() { try { sampleDatas = DbLayer.fetchSampleData(apiCollectionIdsSet, skip); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchSampleData " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1264,6 +1511,7 @@ public String fetchSampleDataById() { try { sampleData = DbLayer.fetchSampleDataById(apiCollectionId, url, method); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchSampleDataById " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1273,6 +1521,7 @@ public String fetchSampleDataByIdMethod() { try { 
sampleData = DbLayer.fetchSampleDataByIdMethod(apiCollectionId, url, methodVal); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchSampleDataByIdMethod " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1282,6 +1531,7 @@ public String fetchTestRole() { try { testRole = DbLayer.fetchTestRole(key); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestRole " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1291,6 +1541,7 @@ public String fetchTestRoles() { try { testRoles = DbLayer.fetchTestRoles(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestRoles " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1300,6 +1551,7 @@ public String fetchTestRolesForRoleName() { try { testRoles = DbLayer.fetchTestRolesForRoleName(roleFromTask); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestRolesForRoleName " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1309,6 +1561,7 @@ public String fetchTestRolesforId() { try { testRole = DbLayer.fetchTestRolesforId(roleId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestRolesforId " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1319,6 +1572,7 @@ public String fetchTestingRunResultSummary() { trrs = DbLayer.fetchTestingRunResultSummary(testingRunResultSummaryId); trrs.setTestingRunHexId(trrs.getTestingRunId().toHexString()); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestingRunResultSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1328,6 +1582,7 @@ public String fetchTestingRunResultSummaryMap() { try { testingRunResultSummaryMap = DbLayer.fetchTestingRunResultSummaryMap(testingRunId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestingRunResultSummaryMap " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1335,8 +1590,10 @@ public String fetchTestingRunResultSummaryMap() { public String fetchTestingRunResults() { try { - testingRunResult = DbLayer.fetchTestingRunResults(filterForRunResult); + // testingRunResult = DbLayer.fetchTestingRunResults(filterForRunResult); + loggerMaker.errorAndAddToDb("API called fetchTestingRunResults"); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchTestingRunResults " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1346,6 +1603,7 @@ public String fetchToken() { try { token = DbLayer.fetchToken(organizationId, accountId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchToken " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1355,6 +1613,7 @@ public String fetchWorkflowTest() { try { workflowTest = DbLayer.fetchWorkflowTest(workFlowTestId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchWorkflowTest " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1364,6 +1623,7 @@ public String fetchYamlTemplates() { try { yamlTemplates = DbLayer.fetchYamlTemplates(fetchOnlyActive, skip); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchYamlTemplates " + e.toString()); return Action.ERROR.toUpperCase(); } 
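Every action above follows the same try / log / return shape: call into DbLayer, report failures through loggerMaker.errorAndAddToDb, and map the outcome to Action.ERROR or Action.SUCCESS. A minimal sketch of that shape factored into one helper; the executeAndLog name and its Runnable-based signature are assumptions for illustration only, not part of this change:

    // Hypothetical helper (not in this diff): captures the repeated
    // try / log / return-ERROR pattern used by the actions above.
    private String executeAndLog(String actionName, Runnable dbCall) {
        try {
            dbCall.run();
        } catch (Exception e) {
            loggerMaker.errorAndAddToDb(e, "Error in " + actionName + " " + e.toString());
            return Action.ERROR.toUpperCase();
        }
        return Action.SUCCESS.toUpperCase();
    }

    // Example call, mirroring fetchYamlTemplates above (assumes the same fields):
    public String fetchYamlTemplates() {
        return executeAndLog("fetchYamlTemplates",
                () -> { yamlTemplates = DbLayer.fetchYamlTemplates(fetchOnlyActive, skip); });
    }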
return Action.SUCCESS.toUpperCase(); @@ -1373,6 +1633,7 @@ public String findApiCollectionByName() { try { apiCollection = DbLayer.findApiCollectionByName(apiCollectionName); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findApiCollectionByName " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1382,6 +1643,7 @@ public String findApiCollections() { try { apiCollections = DbLayer.findApiCollections(apiCollectionNames); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findApiCollections " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1391,6 +1653,7 @@ public String findSti() { try { sti = DbLayer.findSti(apiCollectionId, url, method); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findSti " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1400,6 +1663,7 @@ public String findStiByParam() { try { stis = DbLayer.findStiByParam(apiCollectionId, param); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findStiByParam " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1409,6 +1673,7 @@ public String findStiWithUrlParamFilters() { try { sti = DbLayer.findStiWithUrlParamFilters(apiCollectionId, url, methodVal, responseCode, isHeader, param, isUrlParam); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in findStiWithUrlParamFilters " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1418,6 +1683,7 @@ public String insertActivity() { try { DbLayer.insertActivity((int) count); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertActivity " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1427,6 +1693,7 @@ public String insertApiCollection() { try { DbLayer.insertApiCollection(apiCollectionId, apiCollectionName); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertApiCollection " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1436,6 +1703,10 @@ public String insertTestingRunResultSummary() { try { DbLayer.insertTestingRunResultSummary(trrs); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertTestingRunResultSummary " + e.toString()); + if (kafkaUtils.isWriteEnabled()) { + kafkaUtils.insertDataSecondary(trrs, "insertTestingRunResultSummary", Context.accountId.get()); + } return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1444,6 +1715,34 @@ public String insertTestingRunResultSummary() { public String insertTestingRunResults() { try { + Map data = new HashMap<>(); + try { + if (this.testingRunResult != null && this.testingRunResult.get("workflowTest") != null) { + Map x = (Map) (((Map) this.testingRunResult.get("workflowTest")) + .get("mapNodeIdToWorkflowNodeDetails")); + if (x != null) { + for (String tmp : x.keySet()) { + ((Map) x.get(tmp)).remove("authMechanism"); + ((Map) x.get(tmp)).remove("customAuthTypes"); + data.put(tmp, objectMapper.convertValue(x.get(tmp), YamlNodeDetails.class)); + } + } + } + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertTestingRunResults mapNodeIdToWorkflowNodeDetails" + e.toString()); + e.printStackTrace(); + } + TestingRunResult testingRunResult = objectMapper.readValue(this.testingRunResult.toJson(), TestingRunResult.class); + + try { + if 
(!data.isEmpty()) { + testingRunResult.getWorkflowTest().setMapNodeIdToWorkflowNodeDetails(data); + } + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertTestingRunResults mapNodeIdToWorkflowNodeDetails2" + e.toString()); + e.printStackTrace(); + } + if(testingRunResult.getSingleTestResults()!=null){ testingRunResult.setTestResults(new ArrayList<>(testingRunResult.getSingleTestResults())); }else if(testingRunResult.getMultiExecTestResults() !=null){ @@ -1462,7 +1761,10 @@ public String insertTestingRunResults() { DbLayer.insertTestingRunResults(testingRunResult); } catch (Exception e) { - System.out.println(e.getMessage()); + loggerMaker.errorAndAddToDb(e, "Error in insertTestingRunResults " + e.toString()); + if (kafkaUtils.isWriteEnabled()) { + kafkaUtils.insertDataSecondary(testingRunResult, "insertTestingRunResults", Context.accountId.get()); + } return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1472,6 +1774,10 @@ public String insertWorkflowTestResult() { try { DbLayer.insertWorkflowTestResult(workflowTestResult); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertWorkflowTestResult " + e.toString()); + if (kafkaUtils.isWriteEnabled()) { + kafkaUtils.insertDataSecondary(workflowTestResult, "insertWorkflowTestResult", Context.accountId.get()); + } return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1482,6 +1788,7 @@ public String markTestRunResultSummaryFailed() { trrs = DbLayer.markTestRunResultSummaryFailed(testingRunResultSummaryId); trrs.setTestingRunHexId(trrs.getTestingRunId().toHexString()); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in markTestRunResultSummaryFailed " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1491,6 +1798,7 @@ public String updateAccessMatrixInfo() { try { DbLayer.updateAccessMatrixInfo(taskId, frequencyInSeconds); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateAccessMatrixInfo " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1500,6 +1808,7 @@ public String updateAccessMatrixUrlToRoles() { try { DbLayer.updateAccessMatrixUrlToRoles(apiInfoKey, ret); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateAccessMatrixUrlToRoles " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1507,9 +1816,20 @@ public String updateAccessMatrixUrlToRoles() { public String updateIssueCountInSummary() { try { - trrs = DbLayer.updateIssueCountInSummary(summaryId, totalCountIssues, operator); + ObjectId summaryObjectId = null; + if (summaryId != null) { + summaryObjectId = new ObjectId(summaryId); + } + if((operator == null || operator.isEmpty()) && summaryId != null){ + totalCountIssues = TestExecutor.calcTotalCountIssues(summaryObjectId); + trrs = DbLayer.updateIssueCountInSummary(summaryId, totalCountIssues); + }else{ + trrs = DbLayer.updateIssueCountInSummary(summaryId, totalCountIssues, operator); + } trrs.setTestingRunHexId(trrs.getTestingRunId().toHexString()); } catch (Exception e) { + e.printStackTrace(); + loggerMaker.errorAndAddToDb(e, "Error in updateIssueCountInSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1519,6 +1839,7 @@ public String updateIssueCountInTestSummary() { try { DbLayer.updateIssueCountInTestSummary(summaryId, totalCountIssues, false); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, 
"Error in updateIssueCountInTestSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1528,6 +1849,7 @@ public String updateLastTestedField() { try { DbLayer.updateLastTestedField(apiCollectionId, url, methodVal); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateLastTestedField " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1537,6 +1859,7 @@ public String updateTestInitiatedCountInTestSummary() { try { DbLayer.updateTestInitiatedCountInTestSummary(summaryId, testInitiatedCount); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTestInitiatedCountInTestSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1546,6 +1869,7 @@ public String updateTestResultsCountInTestSummary() { try { DbLayer.updateTestResultsCountInTestSummary(summaryId, testResultsCount); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTestResultsCountInTestSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1555,6 +1879,7 @@ public String updateTestRunResultSummary() { try { DbLayer.updateTestRunResultSummary(summaryId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTestRunResultSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1564,6 +1889,7 @@ public String updateTestRunResultSummaryNoUpsert() { try { DbLayer.updateTestRunResultSummaryNoUpsert(testingRunResultSummaryId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTestRunResultSummaryNoUpsert " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1573,6 +1899,7 @@ public String updateTestingRun() { try { DbLayer.updateTestingRun(testingRunId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTestingRun " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1582,6 +1909,7 @@ public String updateTestingRunAndMarkCompleted() { try { DbLayer.updateTestingRunAndMarkCompleted(testingRunId, scheduleTs); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTestingRunAndMarkCompleted " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1591,6 +1919,7 @@ public String updateTotalApiCountInTestSummary() { try { DbLayer.updateTotalApiCountInTestSummary(summaryId, totalApiCount); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateTotalApiCountInTestSummary " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1600,6 +1929,7 @@ public String modifyHybridTestingSetting() { try { DbLayer.modifyHybridTestingSetting(hybridTestingEnabled); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in modifyHybridTestingSetting " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1607,9 +1937,32 @@ public String modifyHybridTestingSetting() { public String insertTestingLog() { try { + int accId = Context.accountId.get(); + if (accId == 1733164172) { + return Action.SUCCESS.toUpperCase(); + } Log dbLog = new Log(log.getString("log"), log.getString("key"), log.getInt("timestamp")); + + // Skip writing cyborg call logs. 
+ if (dbLog.getLog().contains("ApiExecutor") && + dbLog.getLog().contains("cyborg")) { + return Action.SUCCESS.toUpperCase(); + } + DbLayer.insertTestingLog(dbLog); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertTestingLog " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String insertProtectionLog() { + try { + Log dbLog = new Log(log.getString("log"), log.getString("key"), log.getInt("timestamp")); + DbLayer.insertProtectionLog(dbLog); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertProtectionLog " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1617,18 +1970,13 @@ public String insertTestingLog() { public String bulkWriteDependencyNodes() { try { - System.out.println("bulkWriteDependencyNodes called"); + loggerMaker.infoAndAddToDb("bulkWriteDependencyNodes called"); DbLayer.bulkWriteDependencyNodes(dependencyNodeList); } catch (Exception e) { - String err = "Error bulkWriteDependencyNodes: "; - if (e != null && e.getStackTrace() != null && e.getStackTrace().length > 0) { - StackTraceElement stackTraceElement = e.getStackTrace()[0]; - err = String.format("Err msg: %s\nClass: %s\nFile: %s\nLine: %d", err, stackTraceElement.getClassName(), stackTraceElement.getFileName(), stackTraceElement.getLineNumber()); - } else { - err = String.format("Err msg: %s\nStackTrace not available", err); - e.printStackTrace(); + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteDependencyNodes " + e.toString()); + if (kafkaUtils.isWriteEnabled()) { + kafkaUtils.insertDataSecondary(dependencyNodeList, "bulkWriteDependencyNodes", Context.accountId.get()); } - System.out.println(err); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1638,6 +1986,52 @@ public String fetchLatestEndpointsForTesting() { try { newEps = DbLayer.fetchLatestEndpointsForTesting(startTimestamp, endTimestamp, apiCollectionId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchLatestEndpointsForTesting " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String insertRuntimeMetricsData() { + try { + DbLayer.insertRuntimeMetricsData(metricsData); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in insertRuntimeMetricsData " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String bulkWriteSuspectSampleData() { + if (kafkaUtils.isWriteEnabled()) { + int accId = Context.accountId.get(); + kafkaUtils.insertData(writesForSuspectSampleData, "bulkWriteSuspectSampleData", accId); + } else { + ArrayList> writes = new ArrayList<>(); + for (BulkUpdates update : writesForSuspectSampleData) { + List updates = update.getUpdates(); + try { + SuspectSampleData sd = objectMapper.readValue( + gson.toJson(gson.fromJson(updates.get(0), Map.class).get("val")), SuspectSampleData.class); + writes.add(new InsertOneModel(sd)); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteSuspectSampleData " + e.toString()); + } + } + try { + DbLayer.bulkWriteSuspectSampleData(writes); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in bulkWriteSuspectSampleData " + e.toString()); + } + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchFilterYamlTemplates() { + try { + yamlTemplates = DbLayer.fetchFilterYamlTemplates(); + } catch (Exception e) { + 
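Several write endpoints in this action (insertTestingRunResultSummary, insertTestingRunResults, insertWorkflowTestResult, bulkWriteDependencyNodes) now fall back to a secondary Kafka topic when the direct Mongo write throws and Kafka writes are enabled, so the payload can be replayed later. A condensed sketch of that fallback shape; persist() is a hypothetical placeholder for the concrete DbLayer write, while kafkaUtils and loggerMaker are the same fields used elsewhere in this class:

    // Sketch of the write-with-Kafka-fallback pattern used above.
    public String writeWithFallback(Object payload, String triggerMethod) {
        try {
            persist(payload);                               // primary path: direct Mongo write
        } catch (Exception e) {
            loggerMaker.errorAndAddToDb(e, "Error in " + triggerMethod + " " + e.toString());
            if (kafkaUtils.isWriteEnabled()) {
                // secondary path: queue the same payload for replay by the Kafka consumer
                kafkaUtils.insertDataSecondary(payload, triggerMethod, Context.accountId.get());
            }
            return Action.ERROR.toUpperCase();
        }
        return Action.SUCCESS.toUpperCase();
    }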
loggerMaker.errorAndAddToDb(e, "Error in fetchFilterYamlTemplates " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1646,6 +2040,16 @@ public String fetchLatestEndpointsForTesting() { public String fetchActiveAdvancedFilters(){ try { this.activeAdvancedFilters = DbLayer.fetchActiveFilterTemplates(); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchActiveFilterTemplates " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchMergedUrls() { + try { + this.mergedUrls = DbLayer.fetchMergedUrls(); } catch (Exception e) { return Action.ERROR.toUpperCase(); } @@ -1656,26 +2060,120 @@ public String fetchStatusOfTests(){ try { this.currentlyRunningTests = DbLayer.fetchStatusOfTests(); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchStatusOfTests " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); } - public String updateIssueCountAndStateInSummary(){ + public String updateIssueCountAndStateInSummary() { try { + if (summaryId != null) { + ObjectId summaryObjectId = new ObjectId(summaryId); + totalCountIssues = TestExecutor.calcTotalCountIssues(summaryObjectId); + } trrs = DbLayer.updateIssueCountAndStateInSummary(summaryId, totalCountIssues, state); trrs.setTestingRunHexId(trrs.getTestingRunId().toHexString()); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateIssueCountAndStateInSummary " + e.toString()); return Action.ERROR.toUpperCase(); } + + // send slack alert + try { + sendSlack(trrs, totalCountIssues, Context.accountId.get()); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "error in sending slack alert for testing" + e); + } + return Action.SUCCESS.toUpperCase(); } + public static void sendSlack(TestingRunResultSummary trrs, Map totalCountIssues, int accountId) { + TestingRun testingRun = DbLayer.findTestingRun(trrs.getTestingRunId().toHexString()); + + if (!testingRun.getSendSlackAlert()) { + loggerMaker.infoAndAddToDb("Not sending slack alert for trrs " + trrs.getId()); + return; + } + + String summaryId = trrs.getHexId(); + + int totalApis = trrs.getTotalApis(); + String testType = TestingRun.findTestType(testingRun,trrs); + int countIssues = totalCountIssues.values().stream().mapToInt(Integer::intValue).sum(); + long nextTestRun = testingRun.getPeriodInSeconds() == 0 ? 
0 : ((long) testingRun.getScheduleTimestamp() + (long) testingRun.getPeriodInSeconds()); + long scanTimeInSeconds = Math.abs(Context.now() - trrs.getStartTimestamp()); + + String collectionName = null; + TestingEndpoints testingEndpoints = testingRun.getTestingEndpoints(); + if(testingEndpoints != null && testingEndpoints.getType() != null && testingEndpoints.getType().equals(TestingEndpoints.Type.COLLECTION_WISE)) { + CollectionWiseTestingEndpoints collectionWiseTestingEndpoints = (CollectionWiseTestingEndpoints) testingEndpoints; + int apiCollectionId = collectionWiseTestingEndpoints.getApiCollectionId(); + ApiCollection apiCollection = DbLayer.fetchApiCollectionMeta(apiCollectionId); + collectionName = apiCollection.getName(); + } + + int newIssues = 0; + List testingRunIssuesList = DbLayer.fetchOpenIssues(summaryId); + Map apisAffectedCount = new HashMap<>(); + for (TestingRunIssues testingRunIssues: testingRunIssuesList) { + String testSubCategory = testingRunIssues.getId().getTestSubCategory(); + int totalApisAffected = apisAffectedCount.getOrDefault(testSubCategory, 0)+1; + apisAffectedCount.put(testSubCategory, totalApisAffected); + if(testingRunIssues.getCreationTime() > trrs.getStartTimestamp()) newIssues++; + } + + testingRunIssuesList.sort(Comparator.comparing(TestingRunIssues::getSeverity)); + + List newIssuesModelList = new ArrayList<>(); + for(TestingRunIssues testingRunIssues : testingRunIssuesList) { + if(testingRunIssues.getCreationTime() > trrs.getStartTimestamp()) { + String testRunResultId; + if(newIssuesModelList.size() <= 5) { + Bson filterForRunResult = Filters.and( + Filters.eq(TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, testingRunIssues.getLatestTestingRunSummaryId()), + Filters.eq(TestingRunResult.TEST_SUB_TYPE, testingRunIssues.getId().getTestSubCategory()), + Filters.eq(TestingRunResult.API_INFO_KEY, testingRunIssues.getId().getApiInfoKey()) + ); + TestingRunResult testingRunResult = DbLayer.fetchTestingRunResults(filterForRunResult); + testRunResultId = testingRunResult.getHexId(); + } else testRunResultId = ""; + + String issueCategory = testingRunIssues.getId().getTestSubCategory(); + newIssuesModelList.add(new NewIssuesModel( + issueCategory, + testRunResultId, + apisAffectedCount.get(issueCategory), + testingRunIssues.getCreationTime() + )); + } + } + + SlackAlerts apiTestStatusAlert = new APITestStatusAlert( + testingRun.getName(), + totalCountIssues.getOrDefault(GlobalEnums.Severity.HIGH.name(), 0), + totalCountIssues.getOrDefault(GlobalEnums.Severity.MEDIUM.name(), 0), + totalCountIssues.getOrDefault(GlobalEnums.Severity.LOW.name(), 0), + countIssues, + newIssues, + totalApis, + collectionName, + scanTimeInSeconds, + testType, + nextTestRun, + newIssuesModelList, + testingRun.getHexId(), + summaryId + ); + SlackSender.sendAlert(accountId, apiTestStatusAlert); + } + public String createCollectionSimpleForVpc() { try { - System.out.println("called1 vpcId" + vpcId); DbLayer.createCollectionSimpleForVpc(vxlanId, vpcId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in createCollectionSimpleForVpc " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); @@ -1683,14 +2181,99 @@ public String createCollectionSimpleForVpc() { public String createCollectionForHostAndVpc() { try { - System.out.println("called2 vpcId" + vpcId); DbLayer.createCollectionForHostAndVpc(host, colId, vpcId); } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in createCollectionForHostAndVpc " + e.toString()); + return 
Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchEndpointsInCollectionUsingHost() { + try { + apiInfoList = DbLayer.fetchEndpointsInCollectionUsingHost(apiCollectionId, skip, deltaPeriodValue); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchEndpointsInCollectionUsingHost " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchOtpTestData() { + try { + otpTestData = DbLayer.fetchOtpTestData(uuid, currTime); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchOtpTestData " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchRecordedLoginFlowInput() { + try { + recordedLoginFlowInput = DbLayer.fetchRecordedLoginFlowInput(); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchRecordedLoginFlowInput " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchLoginFlowStepsData() { + try { + loginFlowStepsData = DbLayer.fetchLoginFlowStepsData(userId); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchLoginFlowStepsData " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String updateLoginFlowStepsData() { + try { + DbLayer.updateLoginFlowStepsData(userId, valuesMap); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in updateLoginFlowStepsData " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchDependencyFlowNodesByApiInfoKey() { + try { + node = DbLayer.fetchDependencyFlowNodesByApiInfoKey(apiCollectionId, url, methodVal); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchDependencyFlowNodesByApiInfoKey " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String fetchSampleDataForEndpoints() { + try { + sampleDatas = DbLayer.fetchSampleDataForEndpoints(endpoints); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchSampleDataForEndpoints " + e.toString()); return Action.ERROR.toUpperCase(); } return Action.SUCCESS.toUpperCase(); } + public String fetchNodesForCollectionIds() { + try { + nodes = DbLayer.fetchNodesForCollectionIds(apiCollectionIds,removeZeroLevel, skip); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in fetchNodesForCollectionIds " + e.toString()); + return Action.ERROR.toUpperCase(); + } + return Action.SUCCESS.toUpperCase(); + } + + public String countTestingRunResultSummaries() { + count = DbLayer.countTestingRunResultSummaries(filter); + return Action.SUCCESS.toUpperCase(); + } + List dependencyNodes; public List getDependencyNodes() { @@ -1723,15 +2306,10 @@ public String findDependencyNodes() { public String fetchTestScript() { try { testScript = DbLayer.fetchTestScript(); - return SUCCESS.toUpperCase(); } catch (Exception e) { - System.out.println("Error in fetchTestScript " + e.toString()); + loggerMaker.errorAndAddToDb(e, "Error in fetchTestScript " + e.toString()); return Action.ERROR.toUpperCase(); } - } - - public String countTestingRunResultSummaries() { - count = DbLayer.countTestingRunResultSummaries(filter); return Action.SUCCESS.toUpperCase(); } @@ -2393,7 +2971,7 @@ public void setTotalApiCount(int totalApiCount) { this.totalApiCount = 
totalApiCount; } - public boolean isHybridTestingEnabled() { + public boolean getHybridTestingEnabled() { return hybridTestingEnabled; } @@ -2513,11 +3091,11 @@ public void setTestingRunResultSummaryMap(Map this.testingRunResultSummaryMap = testingRunResultSummaryMap; } - public TestingRunResult getTestingRunResult() { + public BasicDBObject getTestingRunResult() { return testingRunResult; } - public void setTestingRunResult(TestingRunResult testingRunResult) { + public void setTestingRunResult(BasicDBObject testingRunResult) { this.testingRunResult = testingRunResult; } @@ -2613,6 +3191,26 @@ public void setNewEps(List newEps) { this.newEps = newEps; } + public BasicDBList getMetricsData() { + return metricsData; + } + + public void setMetricsData(BasicDBList metricsData) { + this.metricsData = metricsData; + } + + public void setTrafficCollectorMetrics(TrafficCollectorMetrics trafficCollectorMetrics) { + this.trafficCollectorMetrics = trafficCollectorMetrics; + } + + public List getWritesForSuspectSampleData() { + return writesForSuspectSampleData; + } + + public void setWritesForSuspectSampleData(List writesForSuspectSampleData) { + this.writesForSuspectSampleData = writesForSuspectSampleData; + } + public List getActiveAdvancedFilters() { return activeAdvancedFilters; } @@ -2621,6 +3219,14 @@ public void setActiveAdvancedFilters(List activeAdvancedFilters) { this.activeAdvancedFilters = activeAdvancedFilters; } + public Set getMergedUrls() { + return mergedUrls; + } + + public void setMergedUrls(Set mergedUrls) { + this.mergedUrls = mergedUrls; + } + public List getCurrentlyRunningTests() { return currentlyRunningTests; } @@ -2645,14 +3251,103 @@ public void setVpcId(String vpcId) { this.vpcId = vpcId; } - public TestScript getTestScript() { - return testScript; + + public int getDeltaPeriodValue() { + return deltaPeriodValue; + } + + public void setDeltaPeriodValue(int deltaPeriodValue) { + this.deltaPeriodValue = deltaPeriodValue; + } + + public String getUuid() { + return uuid; + } + + public void setUuid(String uuid) { + this.uuid = uuid; } + public int getCurrTime() { + return currTime; + } + + public void setCurrTime(int currTime) { + this.currTime = currTime; + } + + public OtpTestData getOtpTestData() { + return otpTestData; + } + + public void setOtpTestData(OtpTestData otpTestData) { + this.otpTestData = otpTestData; + } + + public RecordedLoginFlowInput getRecordedLoginFlowInput() { + return recordedLoginFlowInput; + } + + public void setRecordedLoginFlowInput(RecordedLoginFlowInput recordedLoginFlowInput) { + this.recordedLoginFlowInput = recordedLoginFlowInput; + } + + public LoginFlowStepsData getLoginFlowStepsData() { + return loginFlowStepsData; + } + + public void setLoginFlowStepsData(LoginFlowStepsData loginFlowStepsData) { + this.loginFlowStepsData = loginFlowStepsData; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public Map getValuesMap() { + return valuesMap; + } + + public void setValuesMap(Map valuesMap) { + this.valuesMap = valuesMap; + } + + public Node getNode() { + return node; + } + + public void setNode(Node node) { + this.node = node; + } + + public List getNodes() { + return nodes; + } + + public void setNodes(List nodes) { + this.nodes = nodes; + } + + public boolean getRemoveZeroLevel() { + return removeZeroLevel; + } + + public void setRemoveZeroLevel(boolean removeZeroLevel) { + this.removeZeroLevel = removeZeroLevel; + } + public void setFilter(Bson filter) { 
this.filter = filter; } + public TestScript getTestScript() { + return testScript; + } + public String getOperator() { return operator; } diff --git a/apps/database-abstractor/src/main/java/com/akto/action/InfraMetricsAction.java b/apps/database-abstractor/src/main/java/com/akto/action/InfraMetricsAction.java new file mode 100644 index 0000000000..afd97d41f2 --- /dev/null +++ b/apps/database-abstractor/src/main/java/com/akto/action/InfraMetricsAction.java @@ -0,0 +1,22 @@ +package com.akto.action; + +import com.opensymphony.xwork2.Action; +import com.opensymphony.xwork2.ActionSupport; + +public class InfraMetricsAction extends ActionSupport { + + String ok; + + public String health() { + ok = "ok"; + return Action.SUCCESS.toUpperCase(); + } + + public String getOk() { + return ok; + } + + public void setOk(String ok) { + this.ok = ok; + } +} diff --git a/apps/database-abstractor/src/main/java/com/akto/filter/AuthFilter.java b/apps/database-abstractor/src/main/java/com/akto/filter/AuthFilter.java index 57b53783b1..3360adc59b 100644 --- a/apps/database-abstractor/src/main/java/com/akto/filter/AuthFilter.java +++ b/apps/database-abstractor/src/main/java/com/akto/filter/AuthFilter.java @@ -31,6 +31,7 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo } catch (Exception e) { System.out.println(e.getMessage()); httpServletResponse.sendError(401); + return; } chain.doFilter(servletRequest, servletResponse); diff --git a/apps/database-abstractor/src/main/java/com/akto/listener/InitializerListener.java b/apps/database-abstractor/src/main/java/com/akto/listener/InitializerListener.java index e4c79e2820..b9a477e59e 100644 --- a/apps/database-abstractor/src/main/java/com/akto/listener/InitializerListener.java +++ b/apps/database-abstractor/src/main/java/com/akto/listener/InitializerListener.java @@ -15,6 +15,7 @@ import com.akto.merging.Cron; import com.akto.utils.KafkaUtils; import com.mongodb.ConnectionString; +import com.mongodb.ReadPreference; public class InitializerListener implements ServletContextListener { diff --git a/apps/database-abstractor/src/main/java/com/akto/trafficFilter/HostFilter.java b/apps/database-abstractor/src/main/java/com/akto/trafficFilter/HostFilter.java new file mode 100644 index 0000000000..f54d6e732d --- /dev/null +++ b/apps/database-abstractor/src/main/java/com/akto/trafficFilter/HostFilter.java @@ -0,0 +1,36 @@ +package com.akto.trafficFilter; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import com.akto.dao.ApiCollectionsDao; +import com.akto.dto.ApiCollection; +import com.mongodb.client.model.Filters; + +public class HostFilter { + + private static final String K8S_DEFAULT_HOST = "kubernetes.default.svc"; + + private static Map> collectionSet = new HashMap<>(); + + public static Set getCollectionSet(int accountId) { + + Set ignoreCollectionSet = new HashSet<>(); + + if (collectionSet.containsKey(accountId)) { + return collectionSet.get(accountId); + } + + ApiCollection collection = ApiCollectionsDao.instance.findOne( + Filters.eq(ApiCollection.HOST_NAME, K8S_DEFAULT_HOST)); + + if (collection != null) { + ignoreCollectionSet.add(collection.getId()); + } + ignoreCollectionSet.add(0); + collectionSet.put(accountId, ignoreCollectionSet); + return collectionSet.get(accountId); + } +} \ No newline at end of file diff --git a/apps/database-abstractor/src/main/java/com/akto/trafficFilter/ParamFilter.java 
b/apps/database-abstractor/src/main/java/com/akto/trafficFilter/ParamFilter.java new file mode 100644 index 0000000000..af777a1f9b --- /dev/null +++ b/apps/database-abstractor/src/main/java/com/akto/trafficFilter/ParamFilter.java @@ -0,0 +1,140 @@ +package com.akto.trafficFilter; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.akto.dao.context.Context; +import com.akto.log.LoggerMaker; +import com.akto.log.LoggerMaker.LogDb; +import com.google.api.client.util.Charsets; +import com.google.common.hash.BloomFilter; +import com.google.common.hash.Funnels; + +public class ParamFilter { + private static final LoggerMaker loggerMaker = new LoggerMaker(ParamFilter.class, LogDb.DB_ABS); + + private static List> filterList = new ArrayList>() { + { + add(BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000, 0.001)); + add(BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000, 0.001)); + add(BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000, 0.001)); + add(BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000, 0.001)); + add(BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000, 0.001)); + } + }; + + private static BloomFilter hostFilter = BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000,0.001); + private static int currentFilterIndex = -1; + private static int filterFillStart = 0; + private static final int TIME_LIMIT = 5 * 60; + private static final int FILTER_LIMIT = 5; + private static final String DOLLAR = "$"; + private static final String HOST = "host"; + + private static void insertInFilter(String key) { + filterList.get(currentFilterIndex).put(key); + } + private static Map filterListHitCount = new HashMap<>(); + + private static synchronized void refreshFilterList() { + int now = Context.now(); + + if ((filterFillStart + TIME_LIMIT) < now) { + BloomFilter newFilter = BloomFilter.create(Funnels.stringFunnel(Charsets.UTF_8), 1_000_000, + 0.001); + + String hitCountLog = ""; + for (int i = 0; i < FILTER_LIMIT; i++) { + hitCountLog += filterListHitCount.getOrDefault(i, 0) + " "; + } + loggerMaker.infoAndAddToDb(String.format("ParamFilter hitCounts: %s",hitCountLog)); + + filterFillStart = now; + currentFilterIndex = (currentFilterIndex + 1) % FILTER_LIMIT; + if (currentFilterIndex < filterList.size()) { + filterList.set(currentFilterIndex, newFilter); + } else { + filterList.add(newFilter); + } + filterListHitCount.put(currentFilterIndex, 0); + } + } + + private static String createKey(int accountId, int apiCollectionId, String url, String method, String param) { + return accountId + DOLLAR + apiCollectionId + DOLLAR + url + DOLLAR + method + DOLLAR + param; + } + + private static int hits = 0; + private static int misses = 0; + private static int firstPrintTime = 0; + private static final int PRINT_INTERVAL = 60; + private static final int DEBUG_COUNT = 50; + private static int hostHits = 0; + private static int hostMisses = 0; + + private static void printL(Object o, boolean print) { + int now = Context.now(); + if (print) { + loggerMaker.infoAndAddToDb(o.toString()); + } + if ((firstPrintTime + PRINT_INTERVAL) < now) { + loggerMaker.infoAndAddToDb(String.format("ParamFilter hits: %d , misses: %d , hostHits %d, hostMisses %d, firstPrintTime: %d , now : %d",hits, misses, hostHits, hostMisses, firstPrintTime, now)); + firstPrintTime = now; + hits = 0; + misses = 0; + hostHits = 0; + hostMisses = 0; + } + } + + public static boolean 
isNewEntry(int accountId, int apiCollectionId, String url, String method, String param) { + String key = createKey(accountId, apiCollectionId, url, method, param); + + /* + * The host filter is no-op + * It serves as reference to how many new hosts we get. + */ + if (HOST.equals(param.toLowerCase())) { + if (!hostFilter.mightContain(key)) { + hostMisses++; + hostFilter.put(key); + printL("ParamFilter inserting host: " + key, hostMisses < DEBUG_COUNT); + } else { + hostHits++; + printL("ParamFilter skipping host: " + key, hostHits < DEBUG_COUNT); + } + } + + boolean isNew = true; + refreshFilterList(); + int i = FILTER_LIMIT; + while (i > 0) { + int ind = (currentFilterIndex + i) % FILTER_LIMIT; + try { + BloomFilter filter = filterList.get(ind); + boolean notFound = (!filter.mightContain(key)); + isNew &= notFound; + if (!notFound) { + int temp = filterListHitCount.getOrDefault(ind, 0) + 1; + filterListHitCount.put(ind, temp); + break; + } + } catch (Exception e) { + } + i--; + } + insertInFilter(key); + if (isNew) { + misses++; + printL("ParamFilter inserting: " + key, misses < DEBUG_COUNT); + return true; + }else { + hits++; + printL("ParamFilter skipping: " + key, hits < DEBUG_COUNT); + } + return false; + } + +} diff --git a/apps/database-abstractor/src/main/java/com/akto/utils/KafkaUtils.java b/apps/database-abstractor/src/main/java/com/akto/utils/KafkaUtils.java index 06b2a9dfff..9b7f50e0c9 100644 --- a/apps/database-abstractor/src/main/java/com/akto/utils/KafkaUtils.java +++ b/apps/database-abstractor/src/main/java/com/akto/utils/KafkaUtils.java @@ -32,7 +32,7 @@ public class KafkaUtils { private final static ObjectMapper mapper = new ObjectMapper(); private static final Gson gson = new Gson(); - private static final LoggerMaker loggerMaker = new LoggerMaker(KafkaUtils.class); + private static final LoggerMaker loggerMaker = new LoggerMaker(KafkaUtils.class, LogDb.DB_ABS); private static final Logger logger = LoggerFactory.getLogger(KafkaUtils.class); private Consumer consumer; private static Kafka kafkaProducer; @@ -81,7 +81,7 @@ public void run() { } catch (InterruptedException e) { e.printStackTrace(); } catch (Error e){ - loggerMaker.errorAndAddToDb("Error in add shut down hook: "+ e.getMessage(), LogDb.DASHBOARD); + loggerMaker.errorAndAddToDb("Error in add shut down hook: "+ e.toString()); } } }); @@ -89,7 +89,7 @@ public void run() { try { this.consumer.subscribe(Arrays.asList(topicName)); - loggerMaker.infoAndAddToDb("Kafka Consumer subscribed", LogDb.DASHBOARD); + loggerMaker.infoAndAddToDb("Kafka Consumer subscribed"); while (true) { ConsumerRecords records = this.consumer.poll(Duration.ofMillis(10000)); try { @@ -107,7 +107,7 @@ public void run() { parseAndTriggerWrites(r.value()); } catch (Exception e) { - loggerMaker.errorAndAddToDb(e, "Error in parseAndTriggerWrites " + e, LogDb.DASHBOARD); + loggerMaker.errorAndAddToDb(e, "Error in parseAndTriggerWrites " + e); continue; } } @@ -116,13 +116,12 @@ public void run() { // nothing to catch. This exception is called from the shutdown hook. 
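ParamFilter.isNewEntry above consults FILTER_LIMIT rotating Bloom filters, each covering roughly TIME_LIMIT seconds, so a key counts as new only when none of the recent windows has seen it. A small call-site sketch of the intended use; the helper name and the example values are made up for illustration:

    // Illustrative call site (hypothetical helper): only persist a parameter the
    // first time its (account, collection, url, method, param) key shows up in
    // the recent Bloom-filter windows.
    static boolean shouldPersistParam(int accountId, int apiCollectionId,
                                      String url, String method, String param) {
        // true only if no active window has seen this exact key before
        return ParamFilter.isNewEntry(accountId, apiCollectionId, url, method, param);
    }

    // e.g. shouldPersistParam(1_000_000, 42, "/api/books", "GET", "host") returns true
    // on the first call and false for repeats, until the filter holding the key is
    // rotated out (roughly FILTER_LIMIT windows of TIME_LIMIT seconds each).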
} catch (Exception e) { exceptionOnCommitSync.set(true); - loggerMaker.errorAndAddToDb("Exception in init kafka consumer " + e.getMessage(),LogDb.DASHBOARD); + loggerMaker.errorAndAddToDb("Exception in init kafka consumer " + e.toString()); e.printStackTrace(); System.exit(0); } finally { this.consumer.close(); } - } public void initKafkaProducer() { @@ -186,6 +185,11 @@ private static void parseAndTriggerWrites(String message) throws Exception { dbAction.bulkWriteTestingRunIssues(); break; + case "bulkWriteSuspectSampleData": + dbAction.setWritesForSuspectSampleData(bulkWrites); + dbAction.bulkWriteSuspectSampleData(); + break; + default: break; } @@ -219,13 +223,42 @@ public boolean isWriteEnabled() { } public void insertData(List writes, String triggerMethod, int accountId) { - String topicName = System.getenv("AKTO_KAFKA_TOPIC_NAME"); - BasicDBObject obj = new BasicDBObject(); - obj.put("triggerMethod", triggerMethod); - String payloadStr = gson.toJson(writes); - obj.put("payload", payloadStr); - obj.put("accountId", accountId); - kafkaProducer.send(obj.toString(), topicName); + insertDataCore(writes, triggerMethod, accountId, "AKTO_KAFKA_TOPIC_NAME", null, "kafka insertData"); + } + + public void insertDataSecondary(Object writes, String triggerMethod, int accountId) { + insertDataCore(writes, triggerMethod, accountId, "AKTO_KAFKA_TOPIC_NAME_SECONDARY", "akto.secondary.trafficdata", "kafka insertDataSecondary"); + } + + /* + * By default, traffic Metrics and traffic Info are sent to the same kafka topic. + * In case of high load, we send them to a different topic + * and add a separate consumer for the topic. + */ + public void insertDataTraffic(List writes, String triggerMethod, int accountId) { + insertDataCore(writes, triggerMethod, accountId, "AKTO_KAFKA_TOPIC_NAME_TRAFFIC", "akto.trafficdata", "kafka insertDataTraffic"); + } + + public void insertDataCore(Object writes, String triggerMethod, int accountId, String topicEnvVar, String defaultTopic, String errorContext) { + try { + // Retrieve topic name from environment variable or use default if specified + String topicName = System.getenv(topicEnvVar); + if (topicName == null) { + if (defaultTopic != null) { + topicName = defaultTopic; + } else { + throw new Exception(topicEnvVar + " is null and no default topic provided"); + } + } + String payloadStr = gson.toJson(writes); + BasicDBObject obj = new BasicDBObject(); + obj.put("triggerMethod", triggerMethod); + obj.put("payload", payloadStr); + obj.put("accountId", accountId); + kafkaProducer.send(obj.toString(), topicName); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in " + errorContext + " " + e.toString()); + } } } diff --git a/apps/database-abstractor/src/main/java/com/akto/utils/RedactAlert.java b/apps/database-abstractor/src/main/java/com/akto/utils/RedactAlert.java new file mode 100644 index 0000000000..c65530c45e --- /dev/null +++ b/apps/database-abstractor/src/main/java/com/akto/utils/RedactAlert.java @@ -0,0 +1,117 @@ +package com.akto.utils; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.regex.Pattern; + +import com.akto.dao.AccountSettingsDao; +import com.akto.dao.context.Context; +import com.akto.dto.AccountSettings; +import com.akto.dto.Config; +import com.akto.log.LoggerMaker; +import com.akto.log.LoggerMaker.LogDb; + +public class RedactAlert { + private static final ExecutorService executorService = 
Executors.newFixedThreadPool(20); + private static final LoggerMaker loggerMaker = new LoggerMaker(RedactAlert.class, LogDb.DB_ABS); + + static final String regex = ".*\\*\\*\\*\\*.*"; + static final Pattern pattern = Pattern.compile(regex); + static final String connectRegex = ".*CONNECT.*"; + static final Pattern connectPattern = Pattern.compile(connectRegex); + + private static final int CACHE_INTERVAL = 2 * 60; + private static Map lastFetchedMap = new HashMap<>(); + private static Map redactMap = new HashMap<>(); + + private static boolean checkRedact() { + int now = Context.now(); + int accountId = Context.accountId.get(); + if (redactMap.containsKey(accountId) && + lastFetchedMap.containsKey(accountId) && + lastFetchedMap.get(accountId) + CACHE_INTERVAL > now) { + return redactMap.get(accountId); + } + + redactMap.put(accountId, false); + try{ + AccountSettings accountSettings = AccountSettingsDao.instance.findOne(AccountSettingsDao.generateFilter()); + if (accountSettings.isRedactPayload()) { + redactMap.put(accountId, true); + } + lastFetchedMap.put(accountId, now); + } catch (Exception e){ + loggerMaker.errorAndAddToDb(e, "Error in checkRedact"); + } + return redactMap.get(accountId); + } + + public static void sendToCyborgSlack(String message) { + String slackCyborgWebhookUrl = null; + try { + Config.SlackAlertCyborgConfig slackCyborgWebhook = com.akto.onprem.Constants.getSlackAlertCyborgConfig(); + if (slackCyborgWebhook != null && slackCyborgWebhook.getSlackWebhookUrl() != null + && !slackCyborgWebhook.getSlackWebhookUrl().isEmpty()) { + slackCyborgWebhookUrl = slackCyborgWebhook.getSlackWebhookUrl(); + LoggerMaker.sendToSlack(slackCyborgWebhookUrl, message); + } + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Unable to send slack alert"); + } + } + + + private static void checkRedactedDataAndSendAlert(List data, + int apiCollectionId, String method, String url) { + + /* + * This condition fails if the sample only + * contains host request header and nothing else. + * This was being observed in CONNECT APIs, + * thus added an additional check for that. 
+ */ + for (String d : data) { + if (!pattern.matcher(d).matches() && !connectPattern.matcher(d).matches()) { + int accountId = Context.accountId.get(); + String message = String.format("Un-redacted sample data coming for account %d for API: %d %s %s", + accountId, apiCollectionId, method, url); + sendToCyborgSlack(message); + } + } + } + + public static void submitSampleDataForChecking(List data, + int apiCollectionId, String method, String url) { + int accountId = Context.accountId.get(); + + if (!checkRedact()) { + return; + } + executorService.submit(() -> { + Context.accountId.set(accountId); + try { + checkRedactedDataAndSendAlert(data, apiCollectionId, method, url); + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Error in check redact and send alert" + e.getMessage()); + } + }); + } + + public static void submitSensitiveSampleDataCall(int apiCollectionId) { + int accountId = Context.accountId.get(); + + if (!checkRedact()) { + return; + } + executorService.submit(() -> { + Context.accountId.set(accountId); + String message = String.format( + "Unredacted sensitive sample data coming for account %d for API collection: %d", + accountId, apiCollectionId); + sendToCyborgSlack(message); + }); + } +} \ No newline at end of file diff --git a/apps/database-abstractor/src/main/java/com/akto/utils/SampleDataLogs.java b/apps/database-abstractor/src/main/java/com/akto/utils/SampleDataLogs.java new file mode 100644 index 0000000000..601cbc65af --- /dev/null +++ b/apps/database-abstractor/src/main/java/com/akto/utils/SampleDataLogs.java @@ -0,0 +1,50 @@ +package com.akto.utils; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.akto.dao.context.Context; + +public class SampleDataLogs { + + static Map countMap = new HashMap<>(); + private static final Logger logger = LoggerFactory.getLogger(SampleDataLogs.class); + + public static int modVal() { + String val = System.getenv("MOD_VAL"); + try { + int i = Integer.valueOf(val); + return i; + } catch (Exception e) { + } + return 10; + } + + public static String createKey(int apiCollectionId, String method, String url) { + int accountId = Context.accountId.get(); + String q = String.format("%d %d %s %s", accountId, apiCollectionId, method, url); + return q; + } + + public static void insertCount(int apiCollectionId, String method, String url, int c) { + String q = createKey(apiCollectionId, method, url); + int count = 0; + if (countMap.containsKey(q)) { + count = countMap.get(q) + c; + } + countMap.put(q, count); + } + + public static void printLog(int apiCollectionId, String method, String url) { + String q = createKey(apiCollectionId, method, url); + if (countMap.containsKey(q)) { + int count = countMap.get(q); + if (count % modVal() == 0) { + logger.info(String.format("%s count : %d", q, count)); + } + } + } +} diff --git a/apps/database-abstractor/src/main/resources/struts.xml b/apps/database-abstractor/src/main/resources/struts.xml index 71f91c2c51..7ca1ac94c3 100644 --- a/apps/database-abstractor/src/main/resources/struts.xml +++ b/apps/database-abstractor/src/main/resources/struts.xml @@ -13,6 +13,17 @@ + + + + + + 422 + false + ^actionErrors.* + + + @@ -1091,6 +1102,17 @@ + + + + + + 422 + false + ^actionErrors.* + + + @@ -1124,6 +1146,82 @@ + + + + + + 422 + false + ^actionErrors.* + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + 
^actionErrors.* + + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + 422 + false + ^actionErrors.* + + + @@ -1135,6 +1233,17 @@ + + + + + + 422 + false + ^actionErrors.* + + + @@ -1190,7 +1299,84 @@ - + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + + + + + + 422 + false + ^actionErrors.* + + + + @@ -1201,7 +1387,7 @@ - + @@ -1223,6 +1409,17 @@ + + + + + + 422 + false + ^actionErrors.* + + + diff --git a/apps/database-abstractor/src/test/java/com/akto/trafficFilter/ParamFilterTest.java b/apps/database-abstractor/src/test/java/com/akto/trafficFilter/ParamFilterTest.java new file mode 100644 index 0000000000..4acb1f6618 --- /dev/null +++ b/apps/database-abstractor/src/test/java/com/akto/trafficFilter/ParamFilterTest.java @@ -0,0 +1,26 @@ +package com.akto.trafficFilter; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + + +public class ParamFilterTest { + + @Test + public void testFilterEntry() { + + int accountId = 1_000_000; + int apiCollectionId = 123; + String url = "/testing"; + String method = "GET"; + String param = "host"; + + boolean firstTime = ParamFilter.isNewEntry(accountId, apiCollectionId, url, method, param); + assertTrue(firstTime); + boolean secondTime = ParamFilter.isNewEntry(accountId, apiCollectionId, url, method, param); + assertFalse(secondTime); + } + +} diff --git a/libs/dao/pom.xml b/libs/dao/pom.xml index 9f2334f045..f7225d1387 100644 --- a/libs/dao/pom.xml +++ b/libs/dao/pom.xml @@ -105,7 +105,7 @@ org.springframework.security spring-security-web - 5.6.2 + 5.6.3 de.flapdoodle.embed diff --git a/libs/dao/src/main/java/com/akto/DaoInit.java b/libs/dao/src/main/java/com/akto/DaoInit.java index fa21968747..392afbe542 100644 --- a/libs/dao/src/main/java/com/akto/DaoInit.java +++ b/libs/dao/src/main/java/com/akto/DaoInit.java @@ -8,6 +8,7 @@ import com.akto.dao.testing.TestingRunResultDao; import com.akto.dao.testing.TestingRunResultSummariesDao; import com.akto.dao.testing_run_findings.TestingRunIssuesDao; +import com.akto.dao.traffic_metrics.RuntimeMetricsDao; import com.akto.dao.traffic_metrics.TrafficMetricsDao; import com.akto.dao.usage.UsageMetricsDao; import com.akto.dto.*; @@ -30,12 +31,15 @@ import com.akto.dto.test_run_findings.TestingRunIssues; import com.akto.dto.testing.*; import com.akto.dto.testing.config.TestCollectionProperty; +import com.akto.dto.testing.custom_groups.AllAPIsGroup; +import com.akto.dto.testing.custom_groups.UnauthenticatedEndpoint; import com.akto.dto.testing.info.BFLATestInfo; import com.akto.dto.testing.info.TestInfo; import com.akto.dto.testing.sources.TestSourceConfig; import com.akto.dto.third_party_access.Credential; import com.akto.dto.third_party_access.ThirdPartyAccess; import com.akto.dto.traffic.SampleData; +import com.akto.dto.traffic_metrics.RuntimeMetrics; import com.akto.dto.traffic_metrics.TrafficMetrics; import com.akto.dto.traffic_metrics.TrafficMetricsAlert; import com.akto.dto.type.SingleTypeInfo; @@ -199,6 +203,8 @@ public static CodecRegistry createCodecRegistry(){ .builder(BelongsToPredicate.class).enableDiscriminator(true).build(); ClassModel yamlNodeDetails = ClassModel .builder(YamlNodeDetails.class).enableDiscriminator(true).build(); + ClassModel unauthenticatedEndpointsClassModel = ClassModel 
+ .builder(UnauthenticatedEndpoint.class).enableDiscriminator(true).build(); // ClassModel awsResourceModel = // ClassModel.builder(AwsResource.class).enableDiscriminator(true) // .build(); @@ -246,6 +252,10 @@ public static CodecRegistry createCodecRegistry(){ ClassModel codeAnalysisApiInfoKeyClassModel = ClassModel.builder(CodeAnalysisApiInfo.CodeAnalysisApiInfoKey.class).enableDiscriminator(true).build(); ClassModel riskScoreTestingEndpointsClassModel = ClassModel.builder(RiskScoreTestingEndpoints.class).enableDiscriminator(true).build(); ClassModel OrganizationFlagsClassModel = ClassModel.builder(OrganizationFlags.class).enableDiscriminator(true).build(); + ClassModel sensitiveDataEndpointsClassModel = ClassModel.builder(SensitiveDataEndpoints.class).enableDiscriminator(true).build(); + ClassModel allApisGroupClassModel = ClassModel.builder(AllAPIsGroup.class).enableDiscriminator(true).build(); + + ClassModel RuntimeMetricsClassModel = ClassModel.builder(RuntimeMetrics.class).enableDiscriminator(true).build(); CodecRegistry pojoCodecRegistry = fromProviders(PojoCodecProvider.builder().register( configClassModel, signupInfoClassModel, apiAuthClassModel, attempResultModel, urlTemplateModel, @@ -275,7 +285,7 @@ public static CodecRegistry createCodecRegistry(){ yamlNodeDetails, multiExecTestResultClassModel, workflowTestClassModel, dependencyNodeClassModel, paramInfoClassModel, nodeClassModel, connectionClassModel, edgeClassModel, replaceDetailClassModel, modifyHostDetailClassModel, fileUploadClassModel ,fileUploadLogClassModel, codeAnalysisCollectionClassModel, codeAnalysisApiLocationClassModel, codeAnalysisApiInfoClassModel, codeAnalysisApiInfoKeyClassModel, - riskScoreTestingEndpointsClassModel, OrganizationFlagsClassModel).automatic(true).build()); + riskScoreTestingEndpointsClassModel, OrganizationFlagsClassModel, sensitiveDataEndpointsClassModel, unauthenticatedEndpointsClassModel, allApisGroupClassModel, RuntimeMetricsClassModel).automatic(true).build()); final CodecRegistry customEnumCodecs = CodecRegistries.fromCodecs( new EnumCodec<>(Conditions.Operator.class), @@ -374,6 +384,8 @@ public static void createIndices() { DependencyFlowNodesDao.instance.createIndicesIfAbsent(); CodeAnalysisCollectionDao.instance.createIndicesIfAbsent(); CodeAnalysisApiInfoDao.instance.createIndicesIfAbsent(); + RBACDao.instance.createIndicesIfAbsent(); + RuntimeMetricsDao.instance.createIndicesIfAbsent(); } } diff --git a/libs/dao/src/main/java/com/akto/dao/ApiCollectionsDao.java b/libs/dao/src/main/java/com/akto/dao/ApiCollectionsDao.java index 15635a34a5..a6b1a86aa9 100644 --- a/libs/dao/src/main/java/com/akto/dao/ApiCollectionsDao.java +++ b/libs/dao/src/main/java/com/akto/dao/ApiCollectionsDao.java @@ -2,11 +2,11 @@ import com.akto.dao.context.Context; import com.akto.dto.ApiCollection; -import com.akto.dto.ApiInfo; import com.akto.dto.ApiInfo.ApiInfoKey; +import com.akto.dto.CodeAnalysisCollection; +import com.akto.dto.testing.CollectionWiseTestingEndpoints; import com.akto.dto.type.SingleTypeInfo; import com.akto.util.Constants; -import com.akto.dto.type.SingleTypeInfo; import com.mongodb.BasicDBObject; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; @@ -101,8 +101,20 @@ public List fetchApiGroups() { return ApiCollectionsDao.instance.findAll(Filters.eq(ApiCollection._TYPE, ApiCollection.Type.API_GROUP.toString())); } + public List fetchNonApiGroupsIds() { + return ApiCollectionsDao.instance.findAll( + nonApiGroupFilter(), + 
Projections.include(ApiCollection.ID)); + } + + public Bson nonApiGroupFilter() { + return Filters.or( + Filters.exists(ApiCollection._TYPE, false), + Filters.ne(ApiCollection._TYPE, ApiCollection.Type.API_GROUP.toString())); + } + public ApiCollection findByName(String name) { - List apiCollections = ApiCollectionsDao.instance.findAll(new BasicDBObject()); + List apiCollections = ApiCollectionsDao.instance.findAll(nonApiGroupFilter()); for (ApiCollection apiCollection: apiCollections) { if (apiCollection.getDisplayName() == null) continue; if (apiCollection.getDisplayName().equalsIgnoreCase(name)) { @@ -159,6 +171,22 @@ public Map buildEndpointsCountToApiCollectionMap() { } } + Map codeAnalysisUrlsCountMap = CodeAnalysisApiInfoDao.instance.getUrlsCount(); + if (codeAnalysisUrlsCountMap.isEmpty()) return countMap; + + Map idToCollectionNameMap = CodeAnalysisCollectionDao.instance.findIdToCollectionNameMap(); + for (String codeAnalysisId: codeAnalysisUrlsCountMap.keySet()) { + int count = codeAnalysisUrlsCountMap.getOrDefault(codeAnalysisId, 0); + Integer apiCollectionId = idToCollectionNameMap.get(codeAnalysisId); + if (apiCollectionId == null) continue; + + int currentCount = countMap.getOrDefault(apiCollectionId, 0); + currentCount += count; + + countMap.put(apiCollectionId, currentCount); + } + + return countMap; } @@ -180,8 +208,11 @@ public static List fetchEndpointsInCollection(int apiCollectionId pipeline.add(Aggregates.project(projections)); pipeline.add(Aggregates.group(groupedId, Accumulators.min("startTs", "$timestamp"), Accumulators.sum("changesCount", 1))); - pipeline.add(Aggregates.skip(skip)); - pipeline.add(Aggregates.limit(limit)); + if(limit != -1){ + pipeline.add(Aggregates.skip(skip)); + pipeline.add(Aggregates.limit(limit)); + } + pipeline.add(Aggregates.sort(Sorts.descending("startTs"))); MongoCursor endpointsCursor = SingleTypeInfoDao.instance.getMCollection().aggregate(pipeline, BasicDBObject.class).cursor(); @@ -194,9 +225,11 @@ public static List fetchEndpointsInCollection(int apiCollectionId return endpoints; } + public static final int STIS_LIMIT = 10_000; + public static List fetchHostSTI(int apiCollectionId, int skip) { Bson filterQ = SingleTypeInfoDao.filterForHostHeader(apiCollectionId, true); - return SingleTypeInfoDao.instance.findAll(filterQ, skip,10_000, null); + return SingleTypeInfoDao.instance.findAll(filterQ, skip, STIS_LIMIT, null); } public static List fetchEndpointsInCollectionUsingHost(int apiCollectionId, int skip, int limit, int deltaPeriodValue) { @@ -210,7 +243,17 @@ public static List fetchEndpointsInCollectionUsingHost(int apiCol if (apiCollection.getHostName() == null || apiCollection.getHostName().length() == 0 ) { return fetchEndpointsInCollection(apiCollectionId, skip, limit, deltaPeriodValue); } else { - List allUrlsInCollection = fetchHostSTI(apiCollectionId, skip); + List allUrlsInCollection = new ArrayList<>(); + int localSkip = 0; + while(true){ + List stis = fetchHostSTI(apiCollectionId, localSkip); + allUrlsInCollection.addAll(stis); + if(stis.size() < STIS_LIMIT){ + break; + } + + localSkip += STIS_LIMIT; + } List endpoints = new ArrayList<>(); for(SingleTypeInfo singleTypeInfo: allUrlsInCollection) { diff --git a/libs/dao/src/main/java/com/akto/dao/CodeAnalysisApiInfoDao.java b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisApiInfoDao.java index f3f370d98a..c391887409 100644 --- a/libs/dao/src/main/java/com/akto/dao/CodeAnalysisApiInfoDao.java +++ b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisApiInfoDao.java @@ -3,6 +3,17 
@@ import com.akto.dto.ApiInfo; import com.akto.dto.CodeAnalysisApiInfo; import com.akto.dto.CodeAnalysisCollection; +import com.mongodb.BasicDBObject; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.Accumulators; +import com.mongodb.client.model.Aggregates; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class CodeAnalysisApiInfoDao extends AccountsContextDao { @@ -13,6 +24,29 @@ public void createIndicesIfAbsent() { MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames, true); } + public Map getUrlsCount() { + Map countMap = new HashMap<>(); + List pipeline = new ArrayList<>(); + + BasicDBObject groupedId = new BasicDBObject("codeAnalysisCollectionId", "$_id.codeAnalysisCollectionId"); + pipeline.add(Aggregates.group(groupedId, Accumulators.sum("count",1))); + + MongoCursor endpointsCursor = instance.getMCollection().aggregate(pipeline, BasicDBObject.class).cursor(); + while(endpointsCursor.hasNext()) { + try { + BasicDBObject basicDBObject = endpointsCursor.next(); + BasicDBObject id = (BasicDBObject) basicDBObject.get("_id"); + ObjectId codeAnalysisCollectionId = (ObjectId) id.get("codeAnalysisCollectionId"); + int count = basicDBObject.getInt("count"); + countMap.put(codeAnalysisCollectionId.toHexString(), count); + } catch (Exception e) { + e.printStackTrace(); + } + } + + return countMap; + } + @Override public String getCollName() { return "code_analysis_api_infos"; diff --git a/libs/dao/src/main/java/com/akto/dao/CodeAnalysisCollectionDao.java b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisCollectionDao.java index 03ce3f892f..77a534b5b2 100644 --- a/libs/dao/src/main/java/com/akto/dao/CodeAnalysisCollectionDao.java +++ b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisCollectionDao.java @@ -3,6 +3,12 @@ import com.akto.dto.ApiCollection; import com.akto.dto.CodeAnalysisCollection; import com.akto.dto.usage.UsageMetric; +import com.mongodb.BasicDBObject; +import com.mongodb.client.model.Filters; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class CodeAnalysisCollectionDao extends AccountsContextDao { @@ -12,6 +18,16 @@ public void createIndicesIfAbsent() { MCollection.createIndexIfAbsent(getDBName(), getCollName(), new String[] { CodeAnalysisCollection.NAME }, true); } + public Map findIdToCollectionNameMap() { + Map idToCollectionNameMap = new HashMap<>(); + List codeAnalysisCollections = instance.findAll(Filters.exists(CodeAnalysisCollection.API_COLLECTION_ID)); + for (CodeAnalysisCollection codeAnalysisCollection: codeAnalysisCollections) { + idToCollectionNameMap.put(codeAnalysisCollection.getId().toHexString(), codeAnalysisCollection.getApiCollectionId()); + } + + return idToCollectionNameMap; + } + @Override public String getCollName() { return "code_analysis_collections"; diff --git a/libs/dao/src/main/java/com/akto/dao/CodeAnalysisRepoDao.java b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisRepoDao.java new file mode 100644 index 0000000000..2d43761b03 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisRepoDao.java @@ -0,0 +1,18 @@ +package com.akto.dao; + +import com.akto.dto.CodeAnalysisRepo; + +public class CodeAnalysisRepoDao extends AccountsContextDao{ + + public static final CodeAnalysisRepoDao instance = new CodeAnalysisRepoDao(); + + @Override + public String getCollName() { + return "code_analysis_repos"; + } + + @Override + public 
Class getClassT() { + return CodeAnalysisRepo.class; + } +} diff --git a/libs/dao/src/main/java/com/akto/dao/CodeAnalysisSingleTypeInfoDao.java b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisSingleTypeInfoDao.java new file mode 100644 index 0000000000..477d9d914e --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/CodeAnalysisSingleTypeInfoDao.java @@ -0,0 +1,18 @@ +package com.akto.dao; + +import com.akto.dto.type.SingleTypeInfo; + +public class CodeAnalysisSingleTypeInfoDao extends AccountsContextDao { + + public static final CodeAnalysisSingleTypeInfoDao instance = new CodeAnalysisSingleTypeInfoDao(); + + @Override + public String getCollName() { + return "code_analysis_single_type_infos"; + } + + @Override + public Class getClassT() { + return SingleTypeInfo.class; + } +} diff --git a/libs/dao/src/main/java/com/akto/dao/ProtectionLogsDao.java b/libs/dao/src/main/java/com/akto/dao/ProtectionLogsDao.java new file mode 100644 index 0000000000..7a7bbf377a --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/ProtectionLogsDao.java @@ -0,0 +1,49 @@ +package com.akto.dao; + +import com.akto.dao.context.Context; +import com.akto.dto.Log; +import com.akto.util.DbMode; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; + +public class ProtectionLogsDao extends AccountsContextDao { + + public static final ProtectionLogsDao instance = new ProtectionLogsDao(); + + public static final int maxDocuments = 100_000; + public static final int sizeInBytes = 100_000_000; + + public void createIndicesIfAbsent() { + boolean exists = false; + String dbName = Context.accountId.get()+""; + MongoDatabase db = clients[0].getDatabase(dbName); + for (String col: db.listCollectionNames()){ + if (getCollName().equalsIgnoreCase(col)){ + exists = true; + break; + } + }; + + if (!exists) { + if (DbMode.allowCappedCollections()) { + db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes)); + } else { + db.createCollection(getCollName()); + } + } + + String[] fieldNames = {Log.TIMESTAMP}; + MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames,false); + } + + @Override + public String getCollName() { + return "protection_logs"; + } + + @Override + public Class getClassT() { + return Log.class; + } + +} diff --git a/libs/dao/src/main/java/com/akto/dao/RBACDao.java b/libs/dao/src/main/java/com/akto/dao/RBACDao.java index 394410d6c7..1e6108770c 100644 --- a/libs/dao/src/main/java/com/akto/dao/RBACDao.java +++ b/libs/dao/src/main/java/com/akto/dao/RBACDao.java @@ -1,12 +1,44 @@ package com.akto.dao; +import com.akto.util.Pair; +import io.swagger.models.auth.In; +import org.bson.conversions.Bson; + +import com.akto.dao.context.Context; import com.akto.dto.RBAC; +import com.akto.dto.RBAC.Role; import com.mongodb.client.model.Filters; +import java.util.concurrent.ConcurrentHashMap; + public class RBACDao extends CommonContextDao { public static final RBACDao instance = new RBACDao(); + //Caching for RBACDAO + private static final ConcurrentHashMap, Pair> userRolesMap = new ConcurrentHashMap<>(); + private static final int EXPIRY_TIME = 15 * 60; // 15 minute + public void createIndicesIfAbsent() { + + boolean exists = false; + for (String col: clients[0].getDatabase(Context.accountId.get()+"").listCollectionNames()){ + if (getCollName().equalsIgnoreCase(col)){ + exists = true; + break; + } + }; + + if (!exists) { + 
clients[0].getDatabase(Context.accountId.get()+"").createCollection(getCollName()); + } + + String[] fieldNames = {RBAC.USER_ID, RBAC.ACCOUNT_ID}; + MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames, true); + } + + public void deleteUserEntryFromCache(Pair key) { + userRolesMap.remove(key); + } public boolean isAdmin(int userId, int accountId) { RBAC rbac = RBACDao.instance.findOne( Filters.or(Filters.and( @@ -27,6 +59,28 @@ public boolean isAdmin(int userId, int accountId) { return rbac != null && rbac.getAccountId() == accountId; } + public static Role getCurrentRoleForUser(int userId, int accountId){ + Pair key = new Pair<>(userId, accountId); + Pair userRoleEntry = userRolesMap.get(key); + Role currentRole; + if (userRoleEntry == null || (Context.now() - userRoleEntry.getSecond() > EXPIRY_TIME)) { + Bson filterRbac = Filters.and( + Filters.eq(RBAC.USER_ID, userId), + Filters.eq(RBAC.ACCOUNT_ID, accountId)); + + RBAC userRbac = RBACDao.instance.findOne(filterRbac); + if(userRbac != null){ + currentRole = userRbac.getRole(); + }else{ + currentRole = Role.MEMBER; + } + + userRolesMap.put(key, new Pair<>(currentRole, Context.now())); + } else { + currentRole = userRoleEntry.getFirst(); + } + return currentRole; + } @Override public String getCollName() { return "rbac"; diff --git a/libs/dao/src/main/java/com/akto/dao/SampleDataDao.java b/libs/dao/src/main/java/com/akto/dao/SampleDataDao.java index b917b8e7c2..d95b7d2eb2 100644 --- a/libs/dao/src/main/java/com/akto/dao/SampleDataDao.java +++ b/libs/dao/src/main/java/com/akto/dao/SampleDataDao.java @@ -65,6 +65,21 @@ public SampleData fetchSampleDataForApi(int apiCollectionId, String url, URLMeth return SampleDataDao.instance.findOne(filterQSampleData); } + public SampleData fetchAllSampleDataForApi(int apiCollectionId, String url, URLMethods.Method method) { + Bson filterQSampleData = filterForSampleData(apiCollectionId, url, method); + List list = SampleDataDao.instance.findAll(filterQSampleData); + SampleData sampleData = new SampleData(); + if (list != null && !list.isEmpty()) { + sampleData = list.get(0); + if (list.size() > 1) { + for (SampleData data : list) { + sampleData.getSamples().addAll(data.getSamples()); + } + } + } + return sampleData; + } + public static Bson filterForSampleData(int apiCollectionId, String url, URLMethods.Method method) { return Filters.and( Filters.eq("_id.apiCollectionId", apiCollectionId), diff --git a/libs/dao/src/main/java/com/akto/dao/SetupDao.java b/libs/dao/src/main/java/com/akto/dao/SetupDao.java index 0526ca34d8..9999095d3d 100644 --- a/libs/dao/src/main/java/com/akto/dao/SetupDao.java +++ b/libs/dao/src/main/java/com/akto/dao/SetupDao.java @@ -1,6 +1,7 @@ package com.akto.dao; import com.akto.dto.Setup; +import com.mongodb.BasicDBObject; public class SetupDao extends CommonContextDao { @@ -16,4 +17,17 @@ public Class getClassT() { return Setup.class; } + private final static String SAAS = "saas"; + public boolean isMetered() { + Setup setup = SetupDao.instance.findOne(new BasicDBObject()); + boolean isSaas = false; + if (setup != null) { + String dashboardMode = setup.getDashboardMode(); + if (dashboardMode != null) { + isSaas = dashboardMode.equalsIgnoreCase(SAAS); + } + } + return isSaas; + } + } diff --git a/libs/dao/src/main/java/com/akto/dao/SingleTypeInfoDao.java b/libs/dao/src/main/java/com/akto/dao/SingleTypeInfoDao.java index cf287916e3..a2d0e26f47 100644 --- a/libs/dao/src/main/java/com/akto/dao/SingleTypeInfoDao.java +++ 
b/libs/dao/src/main/java/com/akto/dao/SingleTypeInfoDao.java @@ -362,6 +362,15 @@ public List fetchEndpointsBySubType(SingleTypeInfo.SubType s return processPipelineForEndpoint(pipeline); } + public List fetchSensitiveEndpoints(int apiCollectionId, int skip, int limit) { + Bson filter = filterForSensitiveParamsExcludingUserMarkedSensitive(apiCollectionId, + null, null, null); + List pipeline = getPipelineForEndpoints(filter); + pipeline.add(Aggregates.limit(limit)); + pipeline.add(Aggregates.skip(skip)); + return processPipelineForEndpoint(pipeline); + } + private List getPipelineForEndpoints(Bson matchCriteria) { List pipeline = new ArrayList<>(); BasicDBObject groupedId = diff --git a/libs/dao/src/main/java/com/akto/dao/SuspectSampleDataDao.java b/libs/dao/src/main/java/com/akto/dao/SuspectSampleDataDao.java new file mode 100644 index 0000000000..3575da81f3 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/SuspectSampleDataDao.java @@ -0,0 +1,49 @@ +package com.akto.dao; + +import com.akto.dao.context.Context; +import com.akto.dto.traffic.SuspectSampleData; +import com.akto.util.Constants; +import com.akto.util.DbMode; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; + +public class SuspectSampleDataDao extends AccountsContextDao { + public static final SuspectSampleDataDao instance = new SuspectSampleDataDao(); + + public static final int maxDocuments = 100_000; + public static final int sizeInBytes = 100_000_000; + + public void createIndicesIfAbsent() { + boolean exists = false; + String dbName = Context.accountId.get() + ""; + MongoDatabase db = clients[0].getDatabase(dbName); + for (String col : db.listCollectionNames()) { + if (getCollName().equalsIgnoreCase(col)) { + exists = true; + break; + } + } + + if (!exists) { + if (DbMode.allowCappedCollections()) { + db.createCollection(getCollName(), + new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes)); + } else { + db.createCollection(getCollName()); + } + } + + String[] fieldNames = { SuspectSampleData._DISCOVERED, SuspectSampleData.SOURCE_IPS, SuspectSampleData.MATCHING_URL, SuspectSampleData.API_COLLECTION_ID, Constants.ID }; + MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames, false); + } + + @Override + public String getCollName() { + return "suspect_sample_data"; + } + + @Override + public Class getClassT() { + return SuspectSampleData.class; + } +} diff --git a/libs/dao/src/main/java/com/akto/dao/testing/VulnerableTestingRunResultDao.java b/libs/dao/src/main/java/com/akto/dao/testing/VulnerableTestingRunResultDao.java new file mode 100644 index 0000000000..3355d2f705 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/testing/VulnerableTestingRunResultDao.java @@ -0,0 +1,86 @@ +package com.akto.dao.testing; + +import java.util.List; + +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; + +import com.akto.dao.MCollection; +import com.akto.dao.context.Context; +import com.akto.dto.testing.GenericTestResult; +import com.akto.dto.testing.TestingRunResult; +import com.akto.dto.testing.TestingRunResultSummary; +import com.akto.util.Constants; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; + +public class VulnerableTestingRunResultDao extends TestingRunResultDao { + + public static final VulnerableTestingRunResultDao instance = new VulnerableTestingRunResultDao(); + + @Override + public void createIndicesIfAbsent() { + + String dbName = 
Context.accountId.get()+""; + + CreateCollectionOptions createCollectionOptions = new CreateCollectionOptions(); + createCollectionIfAbsent(dbName, getCollName(), createCollectionOptions); + + + MCollection.createIndexIfAbsent(getDBName(), getCollName(), + new String[] { TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID }, false); + + String[] fieldNames = new String[]{TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, TestingRunResult.TEST_RESULTS+"."+GenericTestResult._CONFIDENCE}; + MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames, false); + + fieldNames = new String[]{TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, TestingRunResult.TEST_SUPER_TYPE}; + MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames, false); + + fieldNames = new String[]{TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, TestingRunResult.API_INFO_KEY, TestingRunResult.TEST_SUB_TYPE}; + MCollection.createIndexIfAbsent(getDBName(), getCollName(), fieldNames, false); + } + + public boolean isStoredInVulnerableCollection(ObjectId objectId){ + try { + Bson filter = Filters.and( + Filters.eq(Constants.ID, objectId), + Filters.eq(TestingRunResultSummary.IS_NEW_TESTING_RUN_RESULT_SUMMARY, true) + ); + boolean isNew = TestingRunResultSummariesDao.instance.count(filter) > 0; + return isNew; + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } + + public int countFromDb(Bson filter, boolean isVulnerable){ + if(isVulnerable){ + int count = (int) instance.count(filter); + if(count != 0){ + return count; + } + } + return (int) TestingRunResultDao.instance.count(filter); + } + + public TestingRunResult findOneWithComparison(Bson q, Bson projection) { + TestingRunResult tr = super.findOne(q, projection); + if(tr == null){ + return TestingRunResultDao.instance.findOne(q, projection); + } + return tr; + } + + public List findAll(Bson q, Bson projection, boolean isStoredInVulnerableCollection) { + if(isStoredInVulnerableCollection){ + return instance.findAll(q,projection); + } + return TestingRunResultDao.instance.findAll(q, projection); + } + + @Override + public String getCollName() { + return "vulnerable_testing_run_results"; + } +} \ No newline at end of file diff --git a/libs/dao/src/main/java/com/akto/dao/traffic_collector/TrafficCollectorInfoDao.java b/libs/dao/src/main/java/com/akto/dao/traffic_collector/TrafficCollectorInfoDao.java new file mode 100644 index 0000000000..d3aaa85c6f --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/traffic_collector/TrafficCollectorInfoDao.java @@ -0,0 +1,60 @@ +package com.akto.dao.traffic_collector; + +import com.akto.dao.AccountsContextDao; +import com.akto.dao.context.Context; +import com.akto.dto.traffic_collector.TrafficCollectorInfo; +import com.akto.util.DbMode; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Updates; + +public class TrafficCollectorInfoDao extends AccountsContextDao { + + public static final TrafficCollectorInfoDao instance = new TrafficCollectorInfoDao(); + + @Override + public String getCollName() { + return "traffic_collector_info"; + } + + @Override + public Class getClassT() { + return TrafficCollectorInfo.class; + } + + public static final int maxDocuments = 10_000; + public static final int sizeInBytes = 10_000_000; + + public void createIndicesIfAbsent() { + boolean exists = false; + String dbName = Context.accountId.get()+""; + MongoDatabase db = clients[0].getDatabase(dbName); + for 
(String col: db.listCollectionNames()){ + if (getCollName().equalsIgnoreCase(col)){ + exists = true; + break; + } + }; + + if (!exists) { + if (DbMode.allowCappedCollections()) { + db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes)); + } else { + db.createCollection(getCollName()); + } + } + } + + + public void updateHeartbeat(String id, String runtimeId) { + instance.updateOne( + Filters.eq("_id", id), + Updates.combine( + Updates.set(TrafficCollectorInfo.LAST_HEARTBEAT, Context.now()), + Updates.setOnInsert(TrafficCollectorInfo.START_TIME, Context.now()), + Updates.setOnInsert(TrafficCollectorInfo.RUNTIME_ID, runtimeId) + ) + ); + } +} diff --git a/libs/dao/src/main/java/com/akto/dao/traffic_collector/TrafficCollectorMetricsDao.java b/libs/dao/src/main/java/com/akto/dao/traffic_collector/TrafficCollectorMetricsDao.java new file mode 100644 index 0000000000..8efee9ba9d --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/traffic_collector/TrafficCollectorMetricsDao.java @@ -0,0 +1,72 @@ +package com.akto.dao.traffic_collector; + +import com.akto.dao.AccountsContextDao; +import com.akto.dao.context.Context; +import com.akto.dto.traffic_collector.TrafficCollectorInfo; +import com.akto.dto.traffic_collector.TrafficCollectorMetrics; +import com.akto.util.DbMode; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Updates; +import org.apache.commons.collections.ArrayStack; +import org.bson.conversions.Bson; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class TrafficCollectorMetricsDao extends AccountsContextDao { + + public static final TrafficCollectorMetricsDao instance = new TrafficCollectorMetricsDao(); + + @Override + public String getCollName() { + return "traffic_collector_metrics"; + } + + @Override + public Class getClassT() { + return TrafficCollectorMetrics.class; + } + + public static final int maxDocuments = 10_000; + public static final int sizeInBytes = 10_000_000; + + public void createIndicesIfAbsent() { + boolean exists = false; + String dbName = Context.accountId.get()+""; + MongoDatabase db = clients[0].getDatabase(dbName); + for (String col: db.listCollectionNames()){ + if (getCollName().equalsIgnoreCase(col)){ + exists = true; + break; + } + }; + + if (!exists) { + if (DbMode.allowCappedCollections()) { + db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes)); + } else { + db.createCollection(getCollName()); + } + } + } + + public void updateCount(TrafficCollectorMetrics trafficCollectorMetrics) { + List updates = new ArrayList<>(); + Map requestsCountMapPerMinute = trafficCollectorMetrics.getRequestsCountMapPerMinute(); + if (requestsCountMapPerMinute == null || requestsCountMapPerMinute.isEmpty()) return; + for (String key: requestsCountMapPerMinute.keySet()) { + updates.add(Updates.inc(TrafficCollectorMetrics.REQUESTS_COUNT_MAP_PER_MINUTE + "." 
+ key, requestsCountMapPerMinute.getOrDefault(key, 0))); + } + instance.updateOne( + Filters.and( + Filters.eq("_id", trafficCollectorMetrics.getId()), + Filters.eq(TrafficCollectorMetrics.BUCKET_START_EPOCH, trafficCollectorMetrics.getBucketStartEpoch()), + Filters.eq(TrafficCollectorMetrics.BUCKET_END_EPOCH, trafficCollectorMetrics.getBucketEndEpoch()) + ), + Updates.combine(updates) + ); + } +} diff --git a/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java b/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java new file mode 100644 index 0000000000..e8bb16ae9a --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dao/traffic_metrics/RuntimeMetricsDao.java @@ -0,0 +1,82 @@ +package com.akto.dao.traffic_metrics; + +import java.util.ArrayList; + +import org.bson.conversions.Bson; + +import com.akto.dao.AccountsContextDao; +import com.akto.dao.MCollection; +import com.akto.dao.context.Context; +import com.akto.dto.traffic_metrics.RuntimeMetrics; +import com.akto.dto.type.URLMethods; +import com.akto.util.DbMode; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.WriteModel; + +public class RuntimeMetricsDao extends AccountsContextDao { + + public static final RuntimeMetricsDao instance = new RuntimeMetricsDao(); + public static final int maxDocuments = 100_000; + public static final int sizeInBytes = 100_000_000; + + @Override + public String getCollName() { + return "runtime_metrics"; + } + + @Override + public Class getClassT() { + return RuntimeMetrics.class; + } + + public void createIndicesIfAbsent() { + boolean exists = false; + String dbName = Context.accountId.get()+""; + MongoDatabase db = clients[0].getDatabase(dbName); + for (String col: db.listCollectionNames()){ + if (getCollName().equalsIgnoreCase(col)){ + exists = true; + break; + } + }; + + if (!exists) { + db.createCollection(getCollName()); + } + + if (!exists) { + if (DbMode.allowCappedCollections()) { + db.createCollection(getCollName(), new CreateCollectionOptions().capped(true).maxDocuments(maxDocuments).sizeInBytes(sizeInBytes)); + } else { + db.createCollection(getCollName()); + } + } + + MCollection.createIndexIfAbsent(getDBName(), getCollName(), + new String[] { "timestamp" }, true); + MCollection.createIndexIfAbsent(getDBName(), getCollName(), + new String[] { "timestamp", "instanceId" }, true); + } + + public static void bulkInsertMetrics(ArrayList> bulkUpdates) { + RuntimeMetricsDao.instance. 
getMCollection().bulkWrite(bulkUpdates); + } + + public static Bson buildFilters(int startTs, int endTs) { + return Filters.and( + Filters.gte("timestamp", startTs), + Filters.lte("timestamp", endTs) + ); + } + + public static Bson buildFilters(int startTs, int endTs, String instanceId) { + return Filters.and( + Filters.gte("timestamp", startTs), + Filters.lte("timestamp", endTs), + Filters.eq("instanceId", instanceId) + ); + } + +} diff --git a/libs/dao/src/main/java/com/akto/dto/AccountSettings.java b/libs/dao/src/main/java/com/akto/dto/AccountSettings.java index 6de5d55a20..2c9f52884c 100644 --- a/libs/dao/src/main/java/com/akto/dto/AccountSettings.java +++ b/libs/dao/src/main/java/com/akto/dto/AccountSettings.java @@ -98,6 +98,10 @@ public class AccountSettings { public static final String ALLOW_REDUNDANT_ENDPOINTS_LIST = "allowRedundantEndpointsList"; private List allowRedundantEndpointsList; + private static final List defaultCidrRangesList = Arrays.asList("10.0.0.0/8", "172.16.0.0/12", + "192.168.0.0/16", "127.0.0.0/8", "169.254.0.0/16", "224.0.0.0/4", "192.0.2.0/24", "198.51.100.0/24", + "203.0.113.0/24", "255.255.255.255/32", "100.64.0.0/10", "192.88.99.0/24", "240.0.0.0/4"); + public AccountSettings() { } @@ -174,6 +178,9 @@ public void setId(int id) { } public List getPrivateCidrList() { + if(this.privateCidrList == null || this.privateCidrList.isEmpty()){ + return defaultCidrRangesList; + } return privateCidrList; } @@ -368,12 +375,12 @@ public void setPartnerIpList(List partnerIpList) { public List getAllowRedundantEndpointsList() { if(this.allowRedundantEndpointsList == null) { List ignoreUrlTypesList = Arrays.asList( - "htm","html", "css", "js", // Web formats + "htm","html", "css", "js", "js.map", // Web formats "jpg", "jpeg", "png", "gif", "svg", "webp", // Image formats "mp4", "webm", "ogg", "ogv", "avi", "mov", // Video formats "mp3", "wav", "oga", // Audio formats "woff", "woff2", "ttf", "otf", // Font formats - ".pptx", ".json" // file formats + "pptx", "json", "ico" // file formats ); return ignoreUrlTypesList; } diff --git a/libs/dao/src/main/java/com/akto/dto/ApiCollection.java b/libs/dao/src/main/java/com/akto/dto/ApiCollection.java index 8a117cf160..80b0e6c8bb 100644 --- a/libs/dao/src/main/java/com/akto/dto/ApiCollection.java +++ b/libs/dao/src/main/java/com/akto/dto/ApiCollection.java @@ -90,6 +90,7 @@ public ApiCollection(int id, String name, List conditions) { this.name = name; this.conditions = conditions; this.type = Type.API_GROUP; + this.startTs = Context.now(); } public static boolean useHost = true; diff --git a/libs/dao/src/main/java/com/akto/dto/ApiCollectionUsers.java b/libs/dao/src/main/java/com/akto/dto/ApiCollectionUsers.java index 318821ad57..743b6b2654 100644 --- a/libs/dao/src/main/java/com/akto/dto/ApiCollectionUsers.java +++ b/libs/dao/src/main/java/com/akto/dto/ApiCollectionUsers.java @@ -26,7 +26,11 @@ import com.akto.dao.context.Context; import com.akto.dao.demo.VulnerableRequestForTemplateDao; import com.akto.dao.testing_run_findings.TestingRunIssuesDao; +import com.akto.dto.testing.CustomTestingEndpoints; +import com.akto.dto.testing.SensitiveDataEndpoints; import com.akto.dto.testing.TestingEndpoints; +import com.akto.dto.testing.custom_groups.AllAPIsGroup; +import com.akto.dto.testing.custom_groups.UnauthenticatedEndpoint; import com.akto.dto.type.SingleTypeInfo; import com.akto.util.Constants; import com.mongodb.BasicDBObject; @@ -141,6 +145,22 @@ public static void removeFromCollectionsForCollectionId(List c } public static void 
computeCollectionsForCollectionId(List conditions, int apiCollectionId) { + + if(UnauthenticatedEndpoint.UNAUTHENTICATED_GROUP_ID == apiCollectionId){ + UnauthenticatedEndpoint.updateCollections(); + return; + } + + if(SensitiveDataEndpoints.API_GROUP_ID == apiCollectionId){ + SensitiveDataEndpoints.updateCollections(); + return; + } + + if(AllAPIsGroup.ALL_APIS_GROUP_ID == apiCollectionId){ + AllAPIsGroup.updateCollections(); + return; + } + addToCollectionsForCollectionId(conditions, apiCollectionId); removeFromCollectionsForCollectionId(conditions, apiCollectionId); updateApiCollection(conditions, apiCollectionId); @@ -222,4 +242,9 @@ private static void updateCollectionInBatches(MCollection collection, Bson fi logger.info("Total time taken : " + (Context.now() - time) + " for " + collection.getCollName() + " in account id: " + accountId); } + public static void reset(int apiCollectionId) { + CustomTestingEndpoints ep = new CustomTestingEndpoints(new ArrayList<>()); + removeFromCollectionsForCollectionId(Collections.singletonList(ep), apiCollectionId); + } + } diff --git a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApi.java b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApi.java index 839f95e249..080dbcf711 100644 --- a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApi.java +++ b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApi.java @@ -5,14 +5,27 @@ public class CodeAnalysisApi { private String method; private String endpoint; private CodeAnalysisApiLocation location; + private String requestBody; + private String responseBody; public CodeAnalysisApi() { } - public CodeAnalysisApi(String method, String endpoint, CodeAnalysisApiLocation location) { + public CodeAnalysisApi(String method, String endpoint, CodeAnalysisApiLocation location, String requestBody, String responseBody) { this.method = method; this.endpoint = endpoint; this.location = location; + this.requestBody = requestBody; + this.responseBody = responseBody; + } + + @Override + public String toString() { + return "CodeAnalysisApi{" + + "method='" + method + '\'' + + ", endpoint='" + endpoint + '\'' + + ", location=" + location + + '}'; } public String generateCodeAnalysisApisMapKey() { @@ -42,4 +55,20 @@ public CodeAnalysisApiLocation getLocation() { public void setLocation(CodeAnalysisApiLocation location) { this.location = location; } -} + + public String getRequestBody() { + return requestBody; + } + + public void setRequestBody(String requestBody) { + this.requestBody = requestBody; + } + + public String getResponseBody() { + return responseBody; + } + + public void setResponseBody(String responseBody) { + this.responseBody = responseBody; + } +} \ No newline at end of file diff --git a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApiInfo.java b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApiInfo.java index 3b69609e0d..9788c19335 100644 --- a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApiInfo.java +++ b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisApiInfo.java @@ -12,6 +12,11 @@ public class CodeAnalysisApiInfo { public static final String ID = "_id"; private CodeAnalysisApiLocation location; public static final String LOCATION = "location"; + + public static final String DISCOVERED_TS = "discoveredTs"; + private int discoveredTs; + public static final String LAST_SEEN_TS = "lastSeenTs"; + private int lastSeenTs; public static class CodeAnalysisApiInfoKey { private ObjectId codeAnalysisCollectionId; @@ -56,9 +61,11 @@ public void setEndpoint(String endpoint) { public CodeAnalysisApiInfo() { } 
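// Illustration (hedged sketch, not part of this diff): the discoveredTs / lastSeenTs fields added to
// CodeAnalysisApiInfo above suggest an upsert pattern during code-analysis sync -- discoveredTs is
// written only when the endpoint is first seen, lastSeenTs on every sync. The helper below is an
// assumption based on those fields, not code from this change; it reuses the DISCOVERED_TS /
// LAST_SEEN_TS constants and assumes the usual Context, Filters, Updates, UpdateOptions and
// CodeAnalysisApiInfoDao imports; the "_id.*" dot-paths mirror CodeAnalysisApiInfoKey's fields.
static void upsertCodeAnalysisApiInfo(ObjectId codeAnalysisCollectionId, String method, String endpoint) {
    int now = Context.now();
    Bson filter = Filters.and(
            Filters.eq("_id.codeAnalysisCollectionId", codeAnalysisCollectionId),
            Filters.eq("_id.method", method),
            Filters.eq("_id.endpoint", endpoint));
    Bson update = Updates.combine(
            Updates.setOnInsert(CodeAnalysisApiInfo.DISCOVERED_TS, now), // set once, when the API is first seen
            Updates.set(CodeAnalysisApiInfo.LAST_SEEN_TS, now));         // refreshed on every sync run
    CodeAnalysisApiInfoDao.instance.getMCollection()
            .updateOne(filter, update, new UpdateOptions().upsert(true));
}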
- public CodeAnalysisApiInfo(CodeAnalysisApiInfoKey id, CodeAnalysisApiLocation location) { + public CodeAnalysisApiInfo(CodeAnalysisApiInfoKey id, CodeAnalysisApiLocation location, int discoveredTs, int lastSeenTs) { this.id = id; this.location = location; + this.discoveredTs = discoveredTs; + this.lastSeenTs = lastSeenTs; } public CodeAnalysisApiInfoKey getId() { @@ -80,4 +87,20 @@ public void setLocation(CodeAnalysisApiLocation location) { public String generateCodeAnalysisApisMapKey() { return id.getMethod() + " " + id.getEndpoint(); } + + public int getDiscoveredTs() { + return discoveredTs; + } + + public void setDiscoveredTs(int discoveredTs) { + this.discoveredTs = discoveredTs; + } + + public int getLastSeenTs() { + return lastSeenTs; + } + + public void setLastSeenTs(int lastSeenTs) { + this.lastSeenTs = lastSeenTs; + } } diff --git a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisCollection.java b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisCollection.java index cc03ccf931..02f451c81a 100644 --- a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisCollection.java +++ b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisCollection.java @@ -14,12 +14,16 @@ public class CodeAnalysisCollection { private String projectDir; public static final String PROJECT_DIR = "projectDir"; + public static final String API_COLLECTION_ID = "apiCollectionId"; + private int apiCollectionId; + public CodeAnalysisCollection() { } - public CodeAnalysisCollection(String name, String projectDir) { + public CodeAnalysisCollection(String name, String projectDir, int apiCollectionId) { this.name = name; this.projectDir = projectDir; + this.apiCollectionId = apiCollectionId; } public ObjectId getId() { @@ -46,4 +50,12 @@ public String getProjectDir() { public void setProjectDir(String projectDir) { this.projectDir = projectDir; } + + public int getApiCollectionId() { + return apiCollectionId; + } + + public void setApiCollectionId(int apiCollectionId) { + this.apiCollectionId = apiCollectionId; + } } diff --git a/libs/dao/src/main/java/com/akto/dto/CodeAnalysisRepo.java b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisRepo.java new file mode 100644 index 0000000000..1dfae11b59 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/CodeAnalysisRepo.java @@ -0,0 +1,103 @@ +package com.akto.dto; + +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.types.ObjectId; + +public class CodeAnalysisRepo { + + private ObjectId id; + private String projectName; + public static final String PROJECT_NAME = "projectName"; + private String repoName; + public static final String REPO_NAME = "repoName"; + private int lastRun; + public static final String LAST_RUN = "lastRun"; + private int scheduleTime; + public static final String SCHEDULE_TIME = "scheduleTime"; + + public enum SourceCodeType { + BITBUCKET, GITHUB + } + + private SourceCodeType sourceCodeType; + public static final String SOURCE_CODE_TYPE = "sourceCodeType"; + + @BsonIgnore + private String hexId; + + public CodeAnalysisRepo(ObjectId id, String projectName, String repoName, int lastRun, int scheduleTime) { + this.id = id; + this.projectName = projectName; + this.repoName = repoName; + this.lastRun = lastRun; + this.scheduleTime = scheduleTime; + } + + public CodeAnalysisRepo() { + } + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + if (id != null) this.hexId = id.toHexString(); + } + + public String getRepoName() { + return repoName; + } + + public void setRepoName(String repoName) { + 
this.repoName = repoName; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public int getScheduleTime() { + return scheduleTime; + } + + public void setScheduleTime(int scheduleTime) { + this.scheduleTime = scheduleTime; + } + + public int getLastRun() { + return lastRun; + } + + public void setLastRun(int lastRun) { + this.lastRun = lastRun; + } + + public String getHexId() { + return this.id != null ? this.id.toHexString() : null; + } + + public void setHexId(String hexId) { + this.hexId = hexId; + } + public SourceCodeType getSourceCodeType() { + if (sourceCodeType == null) { + return SourceCodeType.BITBUCKET; + } + return sourceCodeType; + } + + public void setSourceCodeType(SourceCodeType sourceCodeType) { + this.sourceCodeType = sourceCodeType; + } + + @Override + public String toString() { + return "Project: " + this.projectName + " Repository:" + this.getRepoName(); + } + +} \ No newline at end of file diff --git a/libs/dao/src/main/java/com/akto/dto/Config.java b/libs/dao/src/main/java/com/akto/dto/Config.java index f94a5460d4..d45e48be4c 100644 --- a/libs/dao/src/main/java/com/akto/dto/Config.java +++ b/libs/dao/src/main/java/com/akto/dto/Config.java @@ -26,7 +26,7 @@ public void setId(String id) { String id; public enum ConfigType { - SLACK, GOOGLE, WEBPUSH, PASSWORD, SALESFORCE, SENDGRID, AUTH0, GITHUB, STIGG, MIXPANEL, SLACK_ALERT, OKTA, AZURE, HYBRID_SAAS, SLACK_ALERT_USAGE; + SLACK, GOOGLE, WEBPUSH, PASSWORD, SALESFORCE, SENDGRID, AUTH0, GITHUB, STIGG, MIXPANEL, SLACK_ALERT, OKTA, AZURE, HYBRID_SAAS, SLACK_ALERT_USAGE, SLACK_ALERT_CYBORG; } ConfigType configType; @@ -619,6 +619,27 @@ public void setSlackWebhookUrl(String slackWebhookUrl) { this.slackWebhookUrl = slackWebhookUrl; } } + + @BsonDiscriminator + public static class SlackAlertCyborgConfig extends Config { + private String slackWebhookUrl; + + public static final String CONFIG_ID = ConfigType.SLACK_ALERT_CYBORG.name() + CONFIG_SALT; + + public SlackAlertCyborgConfig() { + this.configType = ConfigType.SLACK_ALERT_CYBORG; + this.id = CONFIG_ID; + } + + public String getSlackWebhookUrl() { + return slackWebhookUrl; + } + + public void setSlackWebhookUrl(String slackWebhookUrl) { + this.slackWebhookUrl = slackWebhookUrl; + } + } + @BsonDiscriminator public static class HybridSaasConfig extends Config { String privateKey; diff --git a/libs/dao/src/main/java/com/akto/dto/PendingInviteCode.java b/libs/dao/src/main/java/com/akto/dto/PendingInviteCode.java index eaa7c7fb5a..4eeb3c7741 100644 --- a/libs/dao/src/main/java/com/akto/dto/PendingInviteCode.java +++ b/libs/dao/src/main/java/com/akto/dto/PendingInviteCode.java @@ -11,6 +11,7 @@ public class PendingInviteCode { private String inviteeEmailId; private long expiry; private int accountId; + private RBAC.Role inviteeRole; public PendingInviteCode() { } @@ -21,8 +22,17 @@ public PendingInviteCode(String inviteCode, int issuer, String inviteeEmailId, l this.inviteeEmailId = inviteeEmailId; this.expiry = expiry; this.accountId = accountId; + this.inviteeRole = RBAC.Role.GUEST; } + public PendingInviteCode(String inviteCode, int issuer, String inviteeEmailId, long expiry, int accountId, RBAC.Role inviteeRole) { + this.inviteCode = inviteCode; + this.issuer = issuer; + this.inviteeEmailId = inviteeEmailId; + this.expiry = expiry; + this.accountId = accountId; + this.inviteeRole = inviteeRole; + } public ObjectId getId() { return id; } @@ -70,4 +80,12 @@ public int 
getAccountId() { public void setAccountId(int accountId) { this.accountId = accountId; } + + public RBAC.Role getInviteeRole() { + return inviteeRole; + } + + public void setInviteeRole(RBAC.Role inviteeRole) { + this.inviteeRole = inviteeRole; + } } diff --git a/libs/dao/src/main/java/com/akto/dto/RBAC.java b/libs/dao/src/main/java/com/akto/dto/RBAC.java index f1c8b2ddc5..10ee815241 100644 --- a/libs/dao/src/main/java/com/akto/dto/RBAC.java +++ b/libs/dao/src/main/java/com/akto/dto/RBAC.java @@ -3,6 +3,11 @@ import org.bson.types.ObjectId; +import com.akto.dto.rbac.*; + +import com.akto.dto.rbac.RbacEnums.Feature; +import com.akto.dto.rbac.RbacEnums.ReadWriteAccess; + public class RBAC { private ObjectId id; @@ -15,7 +20,30 @@ public class RBAC { public static final String ACCOUNT_ID = "accountId"; public enum Role { - ADMIN, MEMBER + ADMIN("ADMIN",new AdminRoleStrategy()), + MEMBER("SECURITY ENGINEER", new MemberRoleStrategy()), + DEVELOPER("DEVELOPER", new DeveloperRoleStrategy()), + GUEST("GUEST", new GuestRoleStrategy()); + + private final RoleStrategy roleStrategy; + private String name; + + Role(String name ,RoleStrategy roleStrategy) { + this.roleStrategy = roleStrategy; + this.name = name; + } + + public Role[] getRoleHierarchy() { + return roleStrategy.getRoleHierarchy(); + } + + public ReadWriteAccess getReadWriteAccessForFeature(Feature feature) { + return roleStrategy.getFeatureAccessMap().getOrDefault(feature, ReadWriteAccess.READ); + } + + public String getName() { + return name; + } } public RBAC(int userId, Role role) { diff --git a/libs/dao/src/main/java/com/akto/dto/rbac/AdminRoleStrategy.java b/libs/dao/src/main/java/com/akto/dto/rbac/AdminRoleStrategy.java new file mode 100644 index 0000000000..08b70810fd --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/rbac/AdminRoleStrategy.java @@ -0,0 +1,31 @@ +package com.akto.dto.rbac; + +import java.util.HashMap; +import java.util.Map; + +import com.akto.dto.rbac.RbacEnums.AccessGroups; +import com.akto.dto.rbac.RbacEnums.Feature; +import com.akto.dto.rbac.RbacEnums.ReadWriteAccess; +import com.akto.dto.RBAC.Role; + +public class AdminRoleStrategy implements RoleStrategy { + @Override + public Role[] getRoleHierarchy() { + return new Role[]{Role.ADMIN, Role.MEMBER, Role.DEVELOPER, Role.GUEST}; + } + + @Override + public Map getFeatureAccessMap() { + return createAccessMap(AccessGroups.getAccessGroups(), ReadWriteAccess.READ_WRITE); + } + + private Map createAccessMap(AccessGroups[] groups, ReadWriteAccess access) { + Map accessMap = new HashMap<>(); + for (AccessGroups group : groups) { + for (Feature feature : Feature.getFeaturesForAccessGroup(group)) { + accessMap.put(feature, access); + } + } + return accessMap; + } +} \ No newline at end of file diff --git a/libs/dao/src/main/java/com/akto/dto/rbac/DeveloperRoleStrategy.java b/libs/dao/src/main/java/com/akto/dto/rbac/DeveloperRoleStrategy.java new file mode 100644 index 0000000000..6bbc971d53 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/rbac/DeveloperRoleStrategy.java @@ -0,0 +1,31 @@ +package com.akto.dto.rbac; +import java.util.HashMap; +import java.util.Map; + +import com.akto.dto.rbac.RbacEnums.AccessGroups; +import com.akto.dto.rbac.RbacEnums.Feature; +import com.akto.dto.rbac.RbacEnums.ReadWriteAccess; +import com.akto.dto.RBAC.Role; + +public class DeveloperRoleStrategy implements RoleStrategy{ + @Override + public Role[] getRoleHierarchy() { + return new Role[]{Role.DEVELOPER, Role.GUEST}; + } + + @Override + public Map getFeatureAccessMap() { + Map 
accessMap = new HashMap<>(); + for (AccessGroups group : AccessGroups.getAccessGroups()) { + ReadWriteAccess access = ReadWriteAccess.READ ; + if(group == AccessGroups.SETTINGS ){ + access = ReadWriteAccess.READ_WRITE; + } + for (Feature feature : Feature.getFeaturesForAccessGroup(group)) { + accessMap.put(feature, access); + } + } + RbacEnums.mergeUserFeaturesAccess(accessMap); + return accessMap; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/rbac/GuestRoleStrategy.java b/libs/dao/src/main/java/com/akto/dto/rbac/GuestRoleStrategy.java new file mode 100644 index 0000000000..c2bf625127 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/rbac/GuestRoleStrategy.java @@ -0,0 +1,28 @@ +package com.akto.dto.rbac; +import java.util.HashMap; +import java.util.Map; + +import com.akto.dto.rbac.RbacEnums.AccessGroups; +import com.akto.dto.rbac.RbacEnums.Feature; +import com.akto.dto.rbac.RbacEnums.ReadWriteAccess; +import com.akto.dto.RBAC.Role; + +public class GuestRoleStrategy implements RoleStrategy{ + @Override + public Role[] getRoleHierarchy() { + return new Role[]{Role.GUEST}; + } + + @Override + public Map getFeatureAccessMap() { + Map accessMap = new HashMap<>(); + for (AccessGroups group : AccessGroups.getAccessGroups()) { + ReadWriteAccess access = ReadWriteAccess.READ ; + for (Feature feature : Feature.getFeaturesForAccessGroup(group)) { + accessMap.put(feature, access); + } + } + RbacEnums.mergeUserFeaturesAccess(accessMap); + return accessMap; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/rbac/MemberRoleStrategy.java b/libs/dao/src/main/java/com/akto/dto/rbac/MemberRoleStrategy.java new file mode 100644 index 0000000000..d632dd033b --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/rbac/MemberRoleStrategy.java @@ -0,0 +1,31 @@ +package com.akto.dto.rbac; +import java.util.HashMap; +import java.util.Map; + +import com.akto.dto.rbac.RbacEnums.AccessGroups; +import com.akto.dto.rbac.RbacEnums.Feature; +import com.akto.dto.rbac.RbacEnums.ReadWriteAccess; +import com.akto.dto.RBAC.Role; + +public class MemberRoleStrategy implements RoleStrategy{ + @Override + public Role[] getRoleHierarchy() { + return new Role[]{Role.MEMBER, Role.DEVELOPER, Role.GUEST}; + } + + @Override + public Map getFeatureAccessMap() { + Map accessMap = new HashMap<>(); + for (AccessGroups group : AccessGroups.getAccessGroups()) { + ReadWriteAccess access = ReadWriteAccess.READ ; + if(group != AccessGroups.SETTINGS && group != AccessGroups.ADMIN){ + access = ReadWriteAccess.READ_WRITE; + } + for (Feature feature : Feature.getFeaturesForAccessGroup(group)) { + accessMap.put(feature, access); + } + } + RbacEnums.mergeUserFeaturesAccess(accessMap); + return accessMap; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/rbac/RbacEnums.java b/libs/dao/src/main/java/com/akto/dto/rbac/RbacEnums.java new file mode 100644 index 0000000000..cbe9f0b19b --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/rbac/RbacEnums.java @@ -0,0 +1,73 @@ +package com.akto.dto.rbac; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class RbacEnums { + + public enum AccessGroups { + INVENTORY, + TESTING, + TEST_LIBRARY, + SETTINGS, + ADMIN, + USER; + + public static AccessGroups[] getAccessGroups() { + return values(); + } + } + + public enum Feature { + API_COLLECTIONS(AccessGroups.INVENTORY), + SENSITIVE_DATA(AccessGroups.INVENTORY), + TRAFFIC_FILTERS(AccessGroups.INVENTORY), + DEFAULT_PAYLOADS(AccessGroups.INVENTORY), + 
SAMPLE_DATA(AccessGroups.INVENTORY), + TAGS(AccessGroups.INVENTORY), + ASK_GPT(AccessGroups.INVENTORY), + START_TEST_RUN(AccessGroups.TESTING), + TEST_RESULTS(AccessGroups.TESTING), + TEST_ROLES(AccessGroups.TESTING), + USER_CONFIG(AccessGroups.TESTING), + AUTH_TYPE(AccessGroups.TESTING), + ISSUES(AccessGroups.TESTING), + TEST_EDITOR(AccessGroups.TEST_LIBRARY), + EXTERNAL_TEST_LIBRARY(AccessGroups.TEST_LIBRARY), + INTEGRATIONS(AccessGroups.SETTINGS), + METRICS(AccessGroups.SETTINGS), + LOGS(AccessGroups.SETTINGS), + BILLING(AccessGroups.SETTINGS), + INVITE_MEMBERS(AccessGroups.SETTINGS), + ADMIN_ACTIONS(AccessGroups.ADMIN), + USER_ACTIONS(AccessGroups.USER); + private final AccessGroups accessGroup; + + Feature(AccessGroups accessGroup) { + this.accessGroup = accessGroup; + } + + public AccessGroups getAccessGroup() { + return accessGroup; + } + + public static List getFeaturesForAccessGroup(AccessGroups accessGroup) { + return Arrays.stream(values()) + .filter(feature -> feature.getAccessGroup() == accessGroup) + .collect(Collectors.toList()); + } + } + + public enum ReadWriteAccess { + READ, + READ_WRITE + } + + public static void mergeUserFeaturesAccess (Map accessMap){ + for(Feature feature: Feature.getFeaturesForAccessGroup(AccessGroups.USER)){ + accessMap.put(feature, ReadWriteAccess.READ_WRITE); + } + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/rbac/RoleStrategy.java b/libs/dao/src/main/java/com/akto/dto/rbac/RoleStrategy.java new file mode 100644 index 0000000000..c24a68e0ad --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/rbac/RoleStrategy.java @@ -0,0 +1,11 @@ +package com.akto.dto.rbac; + +import java.util.Map; +import com.akto.dto.RBAC.Role; +import com.akto.dto.rbac.RbacEnums.Feature; +import com.akto.dto.rbac.RbacEnums.ReadWriteAccess; + +public interface RoleStrategy { + Role[] getRoleHierarchy(); + Map getFeatureAccessMap(); +} \ No newline at end of file diff --git a/libs/dao/src/main/java/com/akto/dto/testing/SensitiveDataEndpoints.java b/libs/dao/src/main/java/com/akto/dto/testing/SensitiveDataEndpoints.java new file mode 100644 index 0000000000..520a95a3ce --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/testing/SensitiveDataEndpoints.java @@ -0,0 +1,208 @@ +package com.akto.dto.testing; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.conversions.Bson; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.akto.dao.MCollection; +import com.akto.dao.SensitiveParamInfoDao; +import com.akto.dao.SingleTypeInfoDao; +import com.akto.dao.context.Context; +import com.akto.dto.ApiCollectionUsers.CollectionType; +import com.akto.dto.ApiInfo; +import com.akto.dto.ApiInfo.ApiInfoKey; +import com.akto.dto.ApiCollectionUsers; +import com.akto.dto.SensitiveParamInfo; +import com.akto.dto.type.SingleTypeInfo; +import com.akto.dto.type.URLMethods; +import com.akto.dto.type.URLMethods.Method; +import com.akto.util.Constants; +import com.mongodb.BasicDBObject; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.Accumulators; +import com.mongodb.client.model.Aggregates; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Projections; +import com.mongodb.client.model.Sorts; + +public class SensitiveDataEndpoints extends TestingEndpoints { + + public static final int LIMIT = 50; + private static final Logger logger = 
LoggerFactory.getLogger(SensitiveDataEndpoints.class); + + public SensitiveDataEndpoints() { + super(Type.SENSITIVE_DATA); + } + + public SensitiveDataEndpoints(Operator operator) { + super(Type.SENSITIVE_DATA, operator); + } + + @BsonIgnore + List urls; + + public List getUrls() { + return urls; + } + + public void setUrls(List urls) { + this.urls = urls; + } + + @Override + public List returnApis() { + return urls; + } + + public final static int API_GROUP_ID = 111_111_999; + + public static void updateCollections() { + ApiCollectionUsers.reset(API_GROUP_ID); + SingleTypeInfo.fetchCustomDataTypes(1000000); + Set responseCodes = SingleTypeInfoDao.instance.findDistinctFields(SingleTypeInfo._RESPONSE_CODE, + Integer.class, Filters.exists(SingleTypeInfo._RESPONSE_CODE)); + Set subTypes = SingleTypeInfoDao.instance.findDistinctFields(SingleTypeInfo.SUB_TYPE, String.class, + Filters.exists(SingleTypeInfo.SUB_TYPE)); + + Set sensitiveInResponse = new HashSet<>(SingleTypeInfoDao.instance.sensitiveSubTypeInResponseNames()); + Set sensitiveInRequest = new HashSet<>(SingleTypeInfoDao.instance.sensitiveSubTypeInRequestNames()); + + List localUrls = new ArrayList<>(); + List customSensitiveList = SensitiveParamInfoDao.instance.findAll( + Filters.eq("sensitive", true)); + for (SensitiveParamInfo sensitiveParamInfo : customSensitiveList) { + localUrls.add(new ApiInfoKey(sensitiveParamInfo.getApiCollectionId(), sensitiveParamInfo.getUrl(), + Method.valueOf(sensitiveParamInfo.getMethod()))); + } + + SensitiveDataEndpoints sensitiveDataEndpoints = new SensitiveDataEndpoints(); + if (localUrls != null && !localUrls.isEmpty()) { + sensitiveDataEndpoints.setUrls(new ArrayList<>(localUrls)); + ApiCollectionUsers.addToCollectionsForCollectionId( + Collections.singletonList(sensitiveDataEndpoints), API_GROUP_ID); + } + + final String LAST_TIMESTAMP = "lastTimestamp"; + + for (int responseCode : responseCodes) { + for (String subType : subTypes) { + + Bson responseCodeFilter = Filters.eq(SingleTypeInfo._RESPONSE_CODE, responseCode); + Bson subTypeFilter = Filters.eq(SingleTypeInfo.SUB_TYPE, subType); + + if ((responseCode == -1 && sensitiveInRequest.contains(subType)) + || (responseCode != -1 && sensitiveInResponse.contains(subType))) { + + int timestamp = Context.now() + Constants.ONE_DAY_TIMESTAMP; + + logger.info(String.format("AccountId: %d Starting update sensitive data collection for %d %s", + Context.accountId.get(), responseCode, subType)); + int skip = 0; + while (true) { + List pipeline = new ArrayList<>(); + pipeline.add(Aggregates.sort(Sorts.descending(SingleTypeInfo._TIMESTAMP))); + pipeline.add(Aggregates.match( + Filters.and( + responseCodeFilter, subTypeFilter, + Filters.lt(SingleTypeInfo._TIMESTAMP, timestamp)))); + pipeline.add(Aggregates.skip(skip)); + pipeline.add(Aggregates.limit(LIMIT)); + BasicDBObject groupedId = new BasicDBObject(SingleTypeInfo._API_COLLECTION_ID, + "$apiCollectionId") + .append(SingleTypeInfo._URL, "$url") + .append(SingleTypeInfo._METHOD, "$method"); + + Bson projections = Projections.fields( + Projections.include(SingleTypeInfo._TIMESTAMP, + SingleTypeInfo._API_COLLECTION_ID, SingleTypeInfo._URL, + SingleTypeInfo._METHOD)); + + pipeline.add(Aggregates.project(projections)); + pipeline.add(Aggregates.group(groupedId, + Accumulators.last(LAST_TIMESTAMP, "$timestamp"))); + + MongoCursor endpointsCursor = SingleTypeInfoDao.instance.getMCollection() + .aggregate(pipeline, BasicDBObject.class).cursor(); + + Set endpoints = new HashSet<>(); + while 
(endpointsCursor.hasNext()) { + BasicDBObject v = endpointsCursor.next(); + try { + BasicDBObject vv = (BasicDBObject) v.get("_id"); + ApiInfo.ApiInfoKey apiInfoKey = new ApiInfo.ApiInfoKey( + (int) vv.get("apiCollectionId"), + (String) vv.get("url"), + URLMethods.Method.fromString((String) vv.get("method"))); + endpoints.add(apiInfoKey); + int localTimestamp = v.getInt(LAST_TIMESTAMP); + timestamp = Math.min(timestamp, localTimestamp); + } catch (Exception e) { + e.printStackTrace(); + } + } + + if (!endpoints.isEmpty()) { + logger.info(String.format( + "AccountId: %d Running update sensitive data collection for %d %s with endpoints %d skip %d", + Context.accountId.get(), responseCode, subType, endpoints.size(), skip)); + timestamp = timestamp + 1; + sensitiveDataEndpoints.setUrls(new ArrayList<>(endpoints)); + ApiCollectionUsers.addToCollectionsForCollectionId( + Collections.singletonList(sensitiveDataEndpoints), API_GROUP_ID); + skip += LIMIT; + continue; + } + logger.info(String.format("AccountId: %d Finished update sensitive data collection for %d %s", + Context.accountId.get(), responseCode, subType)); + break; + } + } + } + } + } + + @Override + public boolean containsApi(ApiInfoKey key) { + Bson filterStandardSensitiveParams = SingleTypeInfoDao.instance + .filterForSensitiveParamsExcludingUserMarkedSensitive( + key.getApiCollectionId(), key.getUrl(), key.getMethod().name(), null); + List list = SingleTypeInfoDao.instance.findAll(filterStandardSensitiveParams, 0, 1, + null, Projections.exclude("values")); + if (list != null && !list.isEmpty()) { + return true; + } + return false; + } + + private static Bson createApiFilters(CollectionType type, ApiInfoKey api) { + + String prefix = getFilterPrefix(type); + + return Filters.and( + Filters.eq(prefix + SingleTypeInfo._URL, api.getUrl()), + Filters.eq(prefix + SingleTypeInfo._METHOD, api.getMethod().toString()), + Filters.in(SingleTypeInfo._COLLECTION_IDS, api.getApiCollectionId())); + } + + @Override + public Bson createFilters(CollectionType type) { + Set apiSet = new HashSet<>(returnApis()); + List apiFilters = new ArrayList<>(); + if (apiSet != null && !apiSet.isEmpty()) { + for (ApiInfoKey api : apiSet) { + apiFilters.add(createApiFilters(type, api)); + } + return Filters.or(apiFilters); + } + + return MCollection.noMatchFilter; + } + +} diff --git a/libs/dao/src/main/java/com/akto/dto/testing/TestingEndpoints.java b/libs/dao/src/main/java/com/akto/dto/testing/TestingEndpoints.java index d6e419dbb3..353dc40ce6 100644 --- a/libs/dao/src/main/java/com/akto/dto/testing/TestingEndpoints.java +++ b/libs/dao/src/main/java/com/akto/dto/testing/TestingEndpoints.java @@ -38,7 +38,7 @@ public enum Operator { public enum Type { - CUSTOM, COLLECTION_WISE, WORKFLOW, LOGICAL_GROUP, METHOD, ALL, REGEX, RISK_SCORE + CUSTOM, COLLECTION_WISE, WORKFLOW, LOGICAL_GROUP, METHOD, ALL, REGEX, RISK_SCORE, SENSITIVE_DATA, UNAUTHENTICATED } public Type getType() { @@ -76,6 +76,8 @@ public static TestingEndpoints generateCondition(Type type, Operator operator, B case METHOD: condition = new MethodCondition(operator, Method.valueOf(data.getString("method"))); break; + case REGEX: + condition = new RegexTestingEndpoints(operator, data.getString("regex")); default: break; } @@ -128,4 +130,6 @@ public static Boolean checkDeltaUpdateBased(Type type) { return false; } } + + } diff --git a/libs/dao/src/main/java/com/akto/dto/testing/TestingRun.java b/libs/dao/src/main/java/com/akto/dto/testing/TestingRun.java index 5b695f7f33..f321490a33 100644 --- 
a/libs/dao/src/main/java/com/akto/dto/testing/TestingRun.java +++ b/libs/dao/src/main/java/com/akto/dto/testing/TestingRun.java @@ -1,5 +1,6 @@ package com.akto.dto.testing; +import com.akto.dto.ApiCollection; import org.bson.codecs.pojo.annotations.BsonIgnore; import org.bson.types.ObjectId; @@ -40,9 +41,12 @@ public enum TestingRunType{ ONE_TIME, RECURRING, CI_CD, CONTINUOUS_TESTING } + public static final String SEND_SLACK_ALERT = "sendSlackAlert"; + private boolean sendSlackAlert = false; + public TestingRun() { } - public TestingRun(int scheduleTimestamp, String userEmail, TestingEndpoints testingEndpoints, int testIdConfig, State state, int periodInSeconds, String name, String triggeredBy) { + public TestingRun(int scheduleTimestamp, String userEmail, TestingEndpoints testingEndpoints, int testIdConfig, State state, int periodInSeconds, String name, String triggeredBy, boolean sendSlackAlert) { this.scheduleTimestamp = scheduleTimestamp; this.testRunTime = -1; this.maxConcurrentRequests = -1; @@ -55,8 +59,9 @@ public TestingRun(int scheduleTimestamp, String userEmail, TestingEndpoints test this.periodInSeconds = periodInSeconds; this.name = name; this.triggeredBy = triggeredBy; + this.sendSlackAlert = sendSlackAlert; } - public TestingRun(int scheduleTimestamp, String userEmail, TestingEndpoints testingEndpoints, int testIdConfig, State state, int periodInSeconds, String name, int testRunTime, int maxConcurrentRequests) { + public TestingRun(int scheduleTimestamp, String userEmail, TestingEndpoints testingEndpoints, int testIdConfig, State state, int periodInSeconds, String name, int testRunTime, int maxConcurrentRequests, boolean sendSlackAlert) { this.scheduleTimestamp = scheduleTimestamp; this.testRunTime = testRunTime; this.maxConcurrentRequests = maxConcurrentRequests; @@ -68,6 +73,7 @@ public TestingRun(int scheduleTimestamp, String userEmail, TestingEndpoints test this.state = state; this.periodInSeconds = periodInSeconds; this.name = name; + this.sendSlackAlert = sendSlackAlert; } public TestingRunConfig getTestingRunConfig() { @@ -191,6 +197,18 @@ public void setTriggeredBy(String triggeredBy) { this.triggeredBy = triggeredBy; } + public boolean isSendSlackAlert() { + return sendSlackAlert; + } + + public boolean getSendSlackAlert() { + return sendSlackAlert; + } + + public void setSendSlackAlert(boolean sendSlackAlert) { + this.sendSlackAlert = sendSlackAlert; + } + @Override public String toString() { return "{" + @@ -207,4 +225,13 @@ public String toString() { "}"; } + public static String findTestType(TestingRun testingRun, TestingRunResultSummary trrs) { + String testType = "ONE_TIME"; + if(testingRun.getPeriodInSeconds()>0) testType = "SCHEDULED DAILY"; + if (trrs.getMetadata() != null) testType = "CI_CD"; + return testType; + } + + + } diff --git a/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResult.java b/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResult.java index 3df5b56c22..0e754cd372 100644 --- a/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResult.java +++ b/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResult.java @@ -20,8 +20,10 @@ public class TestingRunResult implements Comparable { public static final String TEST_RUN_ID = "testRunId"; private ObjectId testRunId; + @BsonIgnore private String testRunHexId; + public static final String API_INFO_KEY = "apiInfoKey"; private ApiInfo.ApiInfoKey apiInfoKey; public static final String TEST_SUPER_TYPE = "testSuperType"; @@ -50,6 +52,7 @@ public class TestingRunResult implements 
Comparable { @BsonIgnore private List multiExecTestResults; + public static final String ERRORS_LIST = "errorsList"; private List errorsList; @@ -180,6 +183,17 @@ public void setTestRunResultSummaryId(ObjectId testRunResultSummaryId) { this.testRunResultSummaryId = testRunResultSummaryId; } + public String getTestRunHexId() { + if (testRunHexId == null && this.testRunId != null) { + return this.testRunId.toHexString(); + } + return this.testRunHexId; + } + + public void setTestRunHexId(String testRunHexId) { + this.testRunHexId = testRunHexId; + } + public String getTestSuperType() { return testSuperType; } @@ -236,15 +250,6 @@ public void setWorkflowTest(WorkflowTest workflowTest) { this.workflowTest = workflowTest; } - public String getTestRunHexId() { - if (testRunHexId == null) return this.testRunId.toHexString(); - return this.testRunHexId; - } - - public void setTestRunHexId(String testRunHexId) { - this.testRunHexId = testRunHexId; - } - public String getTestRunResultSummaryHexId() { if (testRunResultSummaryHexId == null) return this.testRunResultSummaryId.toHexString(); return this.testRunResultSummaryHexId; diff --git a/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResultSummary.java b/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResultSummary.java index 1cb5a55958..673b2b6917 100644 --- a/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResultSummary.java +++ b/libs/dao/src/main/java/com/akto/dto/testing/TestingRunResultSummary.java @@ -33,6 +33,9 @@ public class TestingRunResultSummary { private int testIdConfig; + public static final String IS_NEW_TESTING_RUN_RESULT_SUMMARY = "isNewTestingSummary"; + private boolean isNewTestingSummary = true; + @BsonIgnore private String hexId; @@ -171,4 +174,12 @@ public String toString() { ", testInitiatedCount='" + getTestInitiatedCount() + "'" + "}"; } + + public boolean getNewTestingSummary() { + return isNewTestingSummary; + } + + public void setNewTestingSummary(boolean isNewTestingSummary) { + this.isNewTestingSummary = isNewTestingSummary; + } } diff --git a/libs/dao/src/main/java/com/akto/dto/testing/custom_groups/AllAPIsGroup.java b/libs/dao/src/main/java/com/akto/dto/testing/custom_groups/AllAPIsGroup.java new file mode 100644 index 0000000000..22a72a8c66 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/testing/custom_groups/AllAPIsGroup.java @@ -0,0 +1,127 @@ +package com.akto.dto.testing.custom_groups; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.conversions.Bson; + +import com.akto.dao.ApiCollectionsDao; +import com.akto.dao.ApiInfoDao; +import com.akto.dao.MCollection; +import com.akto.dao.context.Context; +import com.akto.dto.ApiCollection; +import com.akto.dto.ApiCollectionUsers; +import com.akto.dto.ApiCollectionUsers.CollectionType; +import com.akto.dto.ApiInfo; +import com.akto.dto.ApiInfo.ApiInfoKey; +import com.akto.dto.testing.TestingEndpoints; +import com.akto.dto.type.SingleTypeInfo; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Projections; +import com.mongodb.client.model.Sorts; + +public class AllAPIsGroup extends TestingEndpoints { + private static int limit = 50; + + @BsonIgnore + private List apiInfos; + + public AllAPIsGroup() { + super(Type.ALL, Operator.OR); + } + + public List getApiInfos() { + return apiInfos; + } + + public void setApiInfos(List apiInfos) { + this.apiInfos = apiInfos; + } 
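+ /* + * Descriptive comment (added for clarity): the ALL group treats every endpoint as a member, so containsApi() below always returns true; createFilters() only ORs together per-API filters for the batch currently held in apiInfos, falling back to MCollection.noMatchFilter when the batch is empty. + */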
+ + @Override + public List returnApis() { + return this.apiInfos; + } + + @Override + public boolean containsApi(ApiInfoKey key) { + return true; + } + + private static Bson createApiFilters(CollectionType type, ApiInfoKey api) { + + String prefix = getFilterPrefix(type); + + return Filters.and( + Filters.eq(prefix + SingleTypeInfo._URL, api.getUrl()), + Filters.eq(prefix + SingleTypeInfo._METHOD, api.getMethod().toString()), + Filters.in(SingleTypeInfo._COLLECTION_IDS, api.getApiCollectionId())); + + } + + public final static int ALL_APIS_GROUP_ID = 111_111_121; + + public static void updateCollections(){ + ApiCollectionUsers.reset(ALL_APIS_GROUP_ID); + + List apiCollections = ApiCollectionsDao.instance.findAll( + Filters.ne(ApiCollection._TYPE, ApiCollection.Type.API_GROUP.toString()), Projections.include("_id") + ); + + for(ApiCollection apiCollection: apiCollections){ + + int lastTimeStampRecorded = Context.now() + (5*60) ; + int apiCollectionId = apiCollection.getId(); + int skip = 0 ; + + // create instance of the conditions class + AllAPIsGroup allAPIsGroup = new AllAPIsGroup(); + while (true) { + Bson filterQ = Filters.and( + Filters.eq(ApiInfo.ID_API_COLLECTION_ID, apiCollectionId), + Filters.lt(ApiInfo.LAST_SEEN, lastTimeStampRecorded) + ); + List apiInfosBatched = ApiInfoDao.instance.findAll( + filterQ, skip, limit, Sorts.descending(ApiInfo.LAST_SEEN), Projections.include( + "_id", ApiInfo.LAST_SEEN + ) + ); + + List apiInfoKeysTemp = new ArrayList<>(); + for(ApiInfo apiInfo: apiInfosBatched){ + apiInfoKeysTemp.add(apiInfo.getId()); + lastTimeStampRecorded = Math.min(lastTimeStampRecorded, apiInfo.getLastSeen()); + } + lastTimeStampRecorded += 2; + skip += limit; + + allAPIsGroup.setApiInfos(apiInfoKeysTemp); + ApiCollectionUsers.addToCollectionsForCollectionId(Collections.singletonList(allAPIsGroup), ALL_APIS_GROUP_ID); + + if(apiInfosBatched.size() < limit){ + break; + } + } + } + + } + + + @Override + public Bson createFilters(CollectionType type) { + Set apiSet = new HashSet<>(returnApis()); + List apiFilters = new ArrayList<>(); + if (apiSet != null && !apiSet.isEmpty()) { + for (ApiInfoKey api : apiSet) { + apiFilters.add(createApiFilters(type, api)); + } + return Filters.or(apiFilters); + } + + return MCollection.noMatchFilter; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/testing/custom_groups/UnauthenticatedEndpoint.java b/libs/dao/src/main/java/com/akto/dto/testing/custom_groups/UnauthenticatedEndpoint.java new file mode 100644 index 0000000000..782a7f4feb --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/testing/custom_groups/UnauthenticatedEndpoint.java @@ -0,0 +1,160 @@ +package com.akto.dto.testing.custom_groups; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.bson.codecs.pojo.annotations.BsonIgnore; +import org.bson.conversions.Bson; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.akto.dao.ApiCollectionsDao; +import com.akto.dao.ApiInfoDao; +import com.akto.dao.MCollection; +import com.akto.dao.context.Context; +import com.akto.dto.ApiCollectionUsers.CollectionType; +import com.akto.dto.ApiCollection; +import com.akto.dto.ApiCollectionUsers; +import com.akto.dto.ApiInfo; +import com.akto.dto.ApiInfo.ApiInfoKey; +import com.akto.dto.ApiInfo.AuthType; +import com.akto.dto.testing.TestingEndpoints; +import com.akto.dto.type.SingleTypeInfo; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Projections; 
+import com.mongodb.client.model.Sorts; + +public class UnauthenticatedEndpoint extends TestingEndpoints { + + private static int limit = 50; + private static final Logger logger = LoggerFactory.getLogger(UnauthenticatedEndpoint.class); + + @BsonIgnore + private List apiInfos; + + public UnauthenticatedEndpoint() { + super(Type.UNAUTHENTICATED, Operator.OR); + } + + public List getApiInfos() { + return apiInfos; + } + + public void setApiInfos(List apiInfos) { + this.apiInfos = apiInfos; + } + + @Override + public List returnApis() { + return this.apiInfos; + } + + @Override + public boolean containsApi(ApiInfoKey key) { + ApiInfo apiInfo = ApiInfoDao.instance.findOne(ApiInfoDao.getFilter(key)); + if (apiInfo == null) { + return false; + } + apiInfo.calculateActualAuth(); + if (apiInfo.getActualAuthType() == null || apiInfo.getActualAuthType().isEmpty()) { + return false; + } + Set authTypes = new HashSet<>(apiInfo.getActualAuthType()); + if (authTypes == null || authTypes.isEmpty()) { + return false; + } + if (authTypes.contains(AuthType.UNAUTHENTICATED)) { + return true; + } + return false; + } + + private static Bson createApiFilters(CollectionType type, ApiInfoKey api) { + + String prefix = getFilterPrefix(type); + + return Filters.and( + Filters.eq(prefix + SingleTypeInfo._URL, api.getUrl()), + Filters.eq(prefix + SingleTypeInfo._METHOD, api.getMethod().toString()), + Filters.in(SingleTypeInfo._COLLECTION_IDS, api.getApiCollectionId())); + + } + + public final static int UNAUTHENTICATED_GROUP_ID = 111_111_120; + + public static void updateCollections(){ + ApiCollectionUsers.reset(UNAUTHENTICATED_GROUP_ID); + + List apiCollections = ApiCollectionsDao.instance.findAll( + Filters.ne(ApiCollection._TYPE, ApiCollection.Type.API_GROUP.toString()), Projections.include("_id") + ); + + Bson unauthenticatedFilter = Filters.in( + ApiInfo.ALL_AUTH_TYPES_FOUND, + Collections.singletonList(Collections.singletonList(ApiInfo.AuthType.UNAUTHENTICATED)) + ); + + for(ApiCollection apiCollection: apiCollections){ + + int lastTimeStampRecorded = Context.now() + (5*60) ; + int apiCollectionId = apiCollection.getId(); + int skip = 0 ; + + // create instance of the conditions class + UnauthenticatedEndpoint unauthenticatedEndpoint = new UnauthenticatedEndpoint(); + logger.info(String.format("AccountId: %d Starting update unauthenticated data collection for %d ", + Context.accountId.get(), apiCollectionId)); + + while (true) { + Bson filterQ = Filters.and( + Filters.eq(ApiInfo.ID_API_COLLECTION_ID, apiCollectionId), + Filters.lt(ApiInfo.LAST_SEEN, lastTimeStampRecorded), + unauthenticatedFilter + ); + List apiInfosBatched = ApiInfoDao.instance.findAll( + filterQ, skip, limit, Sorts.descending(ApiInfo.LAST_SEEN), Projections.include( + "_id", ApiInfo.LAST_SEEN + ) + ); + + List apiInfoKeysTemp = new ArrayList<>(); + for(ApiInfo apiInfo: apiInfosBatched){ + apiInfoKeysTemp.add(apiInfo.getId()); + lastTimeStampRecorded = Math.min(lastTimeStampRecorded, apiInfo.getLastSeen()); + } + lastTimeStampRecorded += 2; + logger.info(String.format( + "AccountId: %d Running update unauthenticated data collection for %d endpoints: %d skip: %d", + Context.accountId.get(), apiCollectionId, apiInfoKeysTemp.size(), skip)); + unauthenticatedEndpoint.setApiInfos(apiInfoKeysTemp); + ApiCollectionUsers.addToCollectionsForCollectionId(Collections.singletonList(unauthenticatedEndpoint), UNAUTHENTICATED_GROUP_ID); + skip += limit; + + if(apiInfosBatched.size() < limit){ + logger.info(String.format("AccountId: %d Finished update 
unauthenticated data collection for %d ", + Context.accountId.get(), apiCollectionId)); + break; + } + } + } + + } + + + @Override + public Bson createFilters(CollectionType type) { + Set apiSet = new HashSet<>(returnApis()); + List apiFilters = new ArrayList<>(); + if (apiSet != null && !apiSet.isEmpty()) { + for (ApiInfoKey api : apiSet) { + apiFilters.add(createApiFilters(type, api)); + } + return Filters.or(apiFilters); + } + + return MCollection.noMatchFilter; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/traffic/SuspectSampleData.java b/libs/dao/src/main/java/com/akto/dto/traffic/SuspectSampleData.java new file mode 100644 index 0000000000..5ae5ae09e0 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/traffic/SuspectSampleData.java @@ -0,0 +1,124 @@ +package com.akto.dto.traffic; + +import java.util.List; + +import org.bson.types.ObjectId; + +import com.akto.dto.type.URLMethods.Method; + +public class SuspectSampleData { + + ObjectId id; + public static final String SOURCE_IPS = "sourceIPs"; + List sourceIPs; + public static final String API_COLLECTION_ID = "apiCollectionId"; + int apiCollectionId; + String url; + Method method; + public static final String _SAMPLE = "sample"; + String sample; + public static final String _DISCOVERED = "discovered"; + int discovered; + /* + * we retrospectively match all sus-samples' url + * with the urls present in the db to match them. + */ + public final static String MATCHING_URL = "matchingUrl"; + String matchingUrl; + + /* + * Corresponding filter which marked it sus. + */ + String filterId; + + public SuspectSampleData() { + } + + public SuspectSampleData(List sourceIPs, int apiCollectionId, String url, Method method, String sample, + int discovered, String filterId) { + this.sourceIPs = sourceIPs; + this.apiCollectionId = apiCollectionId; + this.url = url; + this.method = method; + this.sample = sample; + this.discovered = discovered; + /* + * By default we assume that the attacker was trying to attack home url. 
+ */ + this.matchingUrl = "/"; + this.filterId = filterId; + } + + public List getSourceIPs() { + return sourceIPs; + } + + public void setSourceIPs(List sourceIPs) { + this.sourceIPs = sourceIPs; + } + + public int getApiCollectionId() { + return apiCollectionId; + } + + public void setApiCollectionId(int apiCollectionId) { + this.apiCollectionId = apiCollectionId; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Method getMethod() { + return method; + } + + public void setMethod(Method method) { + this.method = method; + } + + public String getSample() { + return sample; + } + + public void setSample(String sample) { + this.sample = sample; + } + + public int getDiscovered() { + return discovered; + } + + public void setDiscovered(int discovered) { + this.discovered = discovered; + } + + public String getMatchingUrl() { + return matchingUrl; + } + + public void setMatchingUrl(String matchingUrl) { + this.matchingUrl = matchingUrl; + } + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public String getFilterId() { + return filterId; + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + +} \ No newline at end of file diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_collector/TrafficCollectorInfo.java b/libs/dao/src/main/java/com/akto/dto/traffic_collector/TrafficCollectorInfo.java new file mode 100644 index 0000000000..2f8743c895 --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/traffic_collector/TrafficCollectorInfo.java @@ -0,0 +1,53 @@ +package com.akto.dto.traffic_collector; + +public class TrafficCollectorInfo { + + private String id; + public static final String RUNTIME_ID = "runtimeId"; + private String runtimeId; + public static final String START_TIME = "startTime"; + private int startTime; + public static final String LAST_HEARTBEAT = "lastHeartbeat"; + private int lastHeartbeat; + + public TrafficCollectorInfo() {} + + public TrafficCollectorInfo(String id, String runtimeId, int startTime, int lastHeartbeat) { + this.id = id; + this.runtimeId = runtimeId; + this.startTime = startTime; + this.lastHeartbeat = lastHeartbeat; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public int getStartTime() { + return startTime; + } + + public void setStartTime(int startTime) { + this.startTime = startTime; + } + + public int getLastHeartbeat() { + return lastHeartbeat; + } + + public void setLastHeartbeat(int lastHeartbeat) { + this.lastHeartbeat = lastHeartbeat; + } + + public String getRuntimeId() { + return runtimeId; + } + + public void setRuntimeId(String runtimeId) { + this.runtimeId = runtimeId; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_collector/TrafficCollectorMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_collector/TrafficCollectorMetrics.java new file mode 100644 index 0000000000..f855a9712e --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/traffic_collector/TrafficCollectorMetrics.java @@ -0,0 +1,78 @@ +package com.akto.dto.traffic_collector; + +import java.util.Map; + +public class TrafficCollectorMetrics { + private String id; + public static final String RUNTIME_ID = "runtimeId"; + private String runtimeId; + public static final String REQUESTS_COUNT_MAP_PER_MINUTE = "requestsCountMapPerMinute"; + private Map requestsCountMapPerMinute; + public static final String BUCKET_START_EPOCH = "bucketStartEpoch"; + 
private int bucketStartEpoch; + public static final String BUCKET_END_EPOCH = "bucketEndEpoch"; + private int bucketEndEpoch; + + + public TrafficCollectorMetrics(String id, String runtimeId, Map requestsCountMapPerMinute, int bucketStartEpoch, int bucketEndEpoch) { + this.id = id; + this.runtimeId = runtimeId; + this.requestsCountMapPerMinute = requestsCountMapPerMinute; + this.bucketStartEpoch = bucketStartEpoch; + this.bucketEndEpoch = bucketEndEpoch; + } + + public TrafficCollectorMetrics() { + } + + @Override + public String toString() { + return "TrafficCollectorMetrics{" + + "id='" + id + '\'' + + ", runtimeId='" + runtimeId + '\'' + + ", requestsCountMapPerMinute=" + requestsCountMapPerMinute + + ", bucketStartEpoch=" + bucketStartEpoch + + ", bucketEndEpoch=" + bucketEndEpoch + + '}'; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Map getRequestsCountMapPerMinute() { + return requestsCountMapPerMinute; + } + + public void setRequestsCountMapPerMinute(Map requestsCountMapPerMinute) { + this.requestsCountMapPerMinute = requestsCountMapPerMinute; + } + + public int getBucketStartEpoch() { + return bucketStartEpoch; + } + + public void setBucketStartEpoch(int bucketStartEpoch) { + this.bucketStartEpoch = bucketStartEpoch; + } + + public int getBucketEndEpoch() { + return bucketEndEpoch; + } + + public void setBucketEndEpoch(int bucketEndEpoch) { + this.bucketEndEpoch = bucketEndEpoch; + } + + public String getRuntimeId() { + return runtimeId; + } + + public void setRuntimeId(String runtimeId) { + this.runtimeId = runtimeId; + } +} diff --git a/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java new file mode 100644 index 0000000000..53899ac03d --- /dev/null +++ b/libs/dao/src/main/java/com/akto/dto/traffic_metrics/RuntimeMetrics.java @@ -0,0 +1,52 @@ +package com.akto.dto.traffic_metrics; + +public class RuntimeMetrics { + + private String name; + private int timestamp; + private String instanceId; + private Double val; + + public RuntimeMetrics() { + } + + public RuntimeMetrics(String name, int timestamp, String instanceId, Double val) { + this.name = name; + this.timestamp = timestamp; + this.instanceId = instanceId; + this.val = val; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public int getTimestamp() { + return timestamp; + } + + public void setTimestamp(int timestamp) { + this.timestamp = timestamp; + } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + + public Double getVal() { + return val; + } + + public void setVal(Double val) { + this.val = val; + } + +} diff --git a/libs/dao/src/main/java/com/akto/util/Constants.java b/libs/dao/src/main/java/com/akto/util/Constants.java index 8d2a5468ef..fe0c6eb0aa 100644 --- a/libs/dao/src/main/java/com/akto/util/Constants.java +++ b/libs/dao/src/main/java/com/akto/util/Constants.java @@ -13,6 +13,8 @@ private Constants() {} public static final int ONE_MONTH_TIMESTAMP = (60 * 60 * 24 * 30) ; + public static final int ONE_DAY_TIMESTAMP = ( 60 * 60 * 24 ); + public static final String AKTO_IGNORE_FLAG = "x-akto-ignore"; public static final String AKTO_ATTACH_FILE = "x-akto-attach-file"; public static final String AKTO_TOKEN_KEY = "x-akto-key"; diff --git 
a/libs/dao/src/main/java/com/akto/util/HttpRequestResponseUtils.java b/libs/dao/src/main/java/com/akto/util/HttpRequestResponseUtils.java index fc26bf7c05..9f7e414f63 100644 --- a/libs/dao/src/main/java/com/akto/util/HttpRequestResponseUtils.java +++ b/libs/dao/src/main/java/com/akto/util/HttpRequestResponseUtils.java @@ -1,5 +1,8 @@ package com.akto.util; +import com.akto.dao.context.Context; +import com.akto.dto.type.SingleTypeInfo; +import com.akto.types.CappedSet; import com.fasterxml.jackson.core.JsonProcessingException; import com.akto.util.grpc.ProtoBufUtils; import com.fasterxml.jackson.databind.ObjectMapper; @@ -22,6 +25,36 @@ public class HttpRequestResponseUtils { public static final String FORM_URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"; public static final String GRPC_CONTENT_TYPE = "application/grpc"; + public static List generateSTIsFromPayload(int apiCollectionId, String url, String method,String body, int responseCode) { + int now = Context.now(); + List singleTypeInfos = new ArrayList<>(); + Map> respFlattened = extractValuesFromPayload(body); + for (String param: respFlattened.keySet()) { + // values is basically the type + Set values = respFlattened.get(param); + if (values == null || values.isEmpty()) continue; + + ArrayList valuesList = new ArrayList<>(values); + String val = valuesList.get(0) == null ? null : valuesList.get(0).toString(); + SingleTypeInfo.SubType subType = findSubType(val); + SingleTypeInfo.ParamId paramId = new SingleTypeInfo.ParamId(url, method,responseCode, false, param, subType, apiCollectionId, false); + SingleTypeInfo singleTypeInfo = new SingleTypeInfo(paramId, new HashSet<>(), new HashSet<>(), 0, now, 0, new CappedSet<>(), SingleTypeInfo.Domain.ANY, Long.MAX_VALUE, Long.MIN_VALUE); + singleTypeInfos.add(singleTypeInfo); + } + + return singleTypeInfos; + } + + public static SingleTypeInfo.SubType findSubType(String val) { + if (val == null) return SingleTypeInfo.GENERIC; + if (val.equalsIgnoreCase("short") || val.equalsIgnoreCase("int")) return SingleTypeInfo.INTEGER_32; + if (val.equalsIgnoreCase("long")) return SingleTypeInfo.INTEGER_64; + if (val.equalsIgnoreCase("float") || val.equalsIgnoreCase("double")) return SingleTypeInfo.FLOAT; + if (val.equalsIgnoreCase("boolean")) return SingleTypeInfo.TRUE; + + return SingleTypeInfo.GENERIC; + } + public static Map> extractValuesFromPayload(String body) { if (body == null) return new HashMap<>(); if (body.startsWith("[")) body = "{\"json\": "+body+"}"; @@ -113,6 +146,10 @@ public static String jsonToFormUrlEncoded(String requestPayload) { for (String key : jsonObject.keySet()) { // Encode the key and value, and append them to the string builder try { + /* + * Taking this change from mini-runtime-release. + * Takes care of case where key is not present in json Object. 
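+ * String.valueOf(...) converts a null value to the literal string "null", so encode() is never passed null.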
+ */ String tmp = encode(key) + "=" + encode(String.valueOf(jsonObject.get(key))) + "&"; formUrlEncoded.append(tmp); } catch (Exception e) { diff --git a/libs/utils/src/main/java/com/akto/log/LoggerMaker.java b/libs/utils/src/main/java/com/akto/log/LoggerMaker.java index 8f26a01eb4..e4d7e41769 100644 --- a/libs/utils/src/main/java/com/akto/log/LoggerMaker.java +++ b/libs/utils/src/main/java/com/akto/log/LoggerMaker.java @@ -7,14 +7,15 @@ import com.akto.dto.AccountSettings; import com.akto.dto.Config; import com.akto.dto.Log; +import com.akto.util.Constants; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; import com.mongodb.client.model.Filters; import com.mongodb.client.model.Projections; -import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -32,15 +33,16 @@ public class LoggerMaker { private final Class aClass; private static String slackWebhookUrl; + private static String slackCyborgWebhookUrl; public static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); + private static final ExecutorService service = Executors.newFixedThreadPool(1); private static final DataActor dataActor = DataActorFactory.fetchInstance(); protected static final Logger internalLogger = LoggerFactory.getLogger(LoggerMaker.class); static { scheduler.scheduleAtFixedRate(new Runnable() { - @Override public void run() { try { @@ -56,6 +58,28 @@ public void run() { } } }, 0, 1, TimeUnit.MINUTES); + + scheduler.scheduleAtFixedRate(new Runnable() { + @Override + public void run() { + try { + if (slackCyborgWebhookUrl != null) { + return; + } + Config config = ConfigsDao.instance.findOne(Constants.ID, Config.SlackAlertCyborgConfig.CONFIG_ID); + if (config != null) { + Config.SlackAlertCyborgConfig slackCyborgWebhook = (Config.SlackAlertCyborgConfig) config; + if (slackCyborgWebhook != null && slackCyborgWebhook.getSlackWebhookUrl() != null + && !slackCyborgWebhook.getSlackWebhookUrl().isEmpty()) { + slackCyborgWebhookUrl = slackCyborgWebhook.getSlackWebhookUrl(); + internalLogger.info("found slack cyborg config"); + } + } + } catch (Exception e) { + internalLogger.error("error in getting slack cyborg config: " + e.toString()); + } + } + }, 2, 15, TimeUnit.MINUTES); } private static int logCount = 0; @@ -65,7 +89,7 @@ public void run() { private LogDb db; public enum LogDb { - TESTING,RUNTIME,DASHBOARD,BILLING, ANALYSER + TESTING,RUNTIME,DASHBOARD,BILLING, ANALYSER, DB_ABS, THREAT_DETECTION } private static AccountSettings accountSettings = null; @@ -130,6 +154,10 @@ protected static void sendToSlack(String err) { sendToSlack(slackWebhookUrl, err); } + protected static void sendToCyborgSlack(String err){ + sendToSlack(slackCyborgWebhookUrl, err); + } + protected String basicError(String err, LogDb db) { if(Context.accountId.get() != null){ err = String.format("%s\nAccount id: %d", err, Context.accountId.get()); @@ -145,10 +173,18 @@ protected String basicError(String err, LogDb db) { public void errorAndAddToDb(String err, LogDb db) { try { - basicError(err, db); + String finalError = basicError(err, db); if (db.equals(LogDb.BILLING) || db.equals(LogDb.DASHBOARD)) { sendToSlack(err); + } else if(LogDb.DB_ABS.equals(db)){ + service.submit(() -> { + try { + sendToCyborgSlack(finalError); + } catch (Exception e){ + internalLogger.error("Error in sending cyborg error logs %s" , 
e.getMessage()); + } + }); } } catch (Exception e) { e.printStackTrace(); @@ -231,6 +267,10 @@ private void insert(String info, String key, LogDb db) { case BILLING: BillingLogsDao.instance.insertOne(log); break; + // Add db for db-abs + case THREAT_DETECTION: + dataActor.insertProtectionLog(log); + break; default: break; } diff --git a/libs/utils/src/main/java/com/akto/onprem/Constants.java b/libs/utils/src/main/java/com/akto/onprem/Constants.java index 26b8e0096b..135ea85e73 100644 --- a/libs/utils/src/main/java/com/akto/onprem/Constants.java +++ b/libs/utils/src/main/java/com/akto/onprem/Constants.java @@ -100,6 +100,8 @@ public static void sendTestResults(ObjectId summaryId, Organization organization private static Config.SlackAlertUsageConfig slackAlertUsageConfig; private static int lastSlackAlertUsageRefreshTs = 0; + private static Config.SlackAlertCyborgConfig slackAlertCyborgConfig; + private static int lastSlackAlertCyborgRefreshTs = 0; public static Config.SlackAlertUsageConfig getSlackAlertUsageConfig() { slackAlertUsageConfig = getConfig(slackAlertUsageConfig, lastSlackAlertUsageRefreshTs, @@ -109,6 +111,14 @@ public static Config.SlackAlertUsageConfig getSlackAlertUsageConfig() { return slackAlertUsageConfig; } + public static Config.SlackAlertCyborgConfig getSlackAlertCyborgConfig() { + slackAlertCyborgConfig = getConfig(slackAlertCyborgConfig, lastSlackAlertCyborgRefreshTs, + Config.SlackAlertCyborgConfig.CONFIG_ID, + Config.SlackAlertCyborgConfig.class, Config.SlackAlertCyborgConfig::new, + config -> config.setSlackWebhookUrl("")); + return slackAlertCyborgConfig; + } + private static T getConfig(T config, int lastRefreshTs, String configId, Class type, Supplier defaultConfigSupplier, Consumer additionalConfigSetup) { int now = Context.now(); diff --git a/apps/api-runtime/src/main/java/com/akto/runtime/policies/AuthPolicy.java b/libs/utils/src/main/java/com/akto/runtime/policies/AuthPolicy.java similarity index 73% rename from apps/api-runtime/src/main/java/com/akto/runtime/policies/AuthPolicy.java rename to libs/utils/src/main/java/com/akto/runtime/policies/AuthPolicy.java index d78a991232..1335100cd5 100644 --- a/apps/api-runtime/src/main/java/com/akto/runtime/policies/AuthPolicy.java +++ b/libs/utils/src/main/java/com/akto/runtime/policies/AuthPolicy.java @@ -12,7 +12,6 @@ import org.slf4j.LoggerFactory; import java.util.*; -import static com.akto.runtime.utils.Utils.parseCookie; public class AuthPolicy { @@ -34,6 +33,25 @@ private static List findBearerBasicAuth(String header, String return new ArrayList<>(); } + public static Map parseCookie(List cookieList){ + Map cookieMap = new HashMap<>(); + if(cookieList==null)return cookieMap; + for (String cookieValues : cookieList) { + String[] cookies = cookieValues.split(";"); + for (String cookie : cookies) { + cookie=cookie.trim(); + String[] cookieFields = cookie.split("="); + boolean twoCookieFields = cookieFields.length == 2; + if (twoCookieFields) { + if(!cookieMap.containsKey(cookieFields[0])){ + cookieMap.put(cookieFields[0], cookieFields[1]); + } + } + } + } + return cookieMap; + } + public static boolean findAuthType(HttpResponseParams httpResponseParams, ApiInfo apiInfo, RuntimeFilter filter, List customAuthTypes) { Set> allAuthTypesFound = apiInfo.getAllAuthTypesFound(); if (allAuthTypesFound == null) allAuthTypesFound = new HashSet<>(); @@ -48,12 +66,6 @@ public static boolean findAuthType(HttpResponseParams httpResponseParams, ApiInf Map cookieMap = parseCookie(cookieList); Set authTypes = new HashSet<>(); - 
BasicDBObject flattenedPayload = null; - try{ - BasicDBObject basicDBObject = BasicDBObject.parse(httpResponseParams.getRequestParams().getPayload()); - flattenedPayload = JSONUtils.flattenWithDots(basicDBObject); - } catch (Exception e){ - } for (CustomAuthType customAuthType : customAuthTypes) { Set headerAndCookieKeys = new HashSet<>(); @@ -69,17 +81,29 @@ public static boolean findAuthType(HttpResponseParams httpResponseParams, ApiInf // Find custom auth type in payload List customAuthTypePayloadKeys = customAuthType.getPayloadKeys(); - if(flattenedPayload != null && !flattenedPayload.isEmpty() && !customAuthTypePayloadKeys.isEmpty() && flattenedPayload.keySet().containsAll(customAuthTypePayloadKeys)){ - authTypes.add(ApiInfo.AuthType.CUSTOM); - break; + if(!customAuthTypePayloadKeys.isEmpty() ){ + BasicDBObject flattenedPayload = null; + try{ + BasicDBObject basicDBObject = BasicDBObject.parse(httpResponseParams.getRequestParams().getPayload()); + flattenedPayload = JSONUtils.flattenWithDots(basicDBObject); + } catch (Exception e){ + } + if(flattenedPayload != null && !flattenedPayload.isEmpty() && flattenedPayload.keySet().containsAll(customAuthTypePayloadKeys)){ + authTypes.add(ApiInfo.AuthType.CUSTOM); + break; + } } } // find bearer or basic tokens in any header for (String header : headers.keySet()) { List headerValues = headers.getOrDefault(header, new ArrayList<>()); - for (String value : headerValues) { - authTypes.addAll(findBearerBasicAuth(header, value)); + if (!headerValues.isEmpty()) { + for (String value : headerValues) { + authTypes.addAll(findBearerBasicAuth(header, value)); + } + } else { + authTypes.addAll(findBearerBasicAuth(header, "")); } } diff --git a/libs/utils/src/main/java/com/akto/usage/UsageMetricCalculator.java b/libs/utils/src/main/java/com/akto/usage/UsageMetricCalculator.java index b0716cb921..a0f59b1998 100644 --- a/libs/utils/src/main/java/com/akto/usage/UsageMetricCalculator.java +++ b/libs/utils/src/main/java/com/akto/usage/UsageMetricCalculator.java @@ -1,8 +1,10 @@ package com.akto.usage; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -23,9 +25,12 @@ import com.akto.dto.usage.UsageMetric; import com.akto.dto.usage.metadata.ActiveAccounts; import com.akto.log.LoggerMaker; +import com.akto.util.Constants; import com.akto.util.enums.GlobalEnums.YamlTemplateSource; import com.google.gson.Gson; import com.mongodb.client.model.Filters; +import com.mongodb.client.model.Projections; + import org.bson.conversions.Bson; public class UsageMetricCalculator { @@ -43,24 +48,30 @@ public static Set getDemos() { return demos; } - private static int lastDeactivatedFetched = 0; + /* + * to handle multiple accounts using static maps. + */ + private static Map lastDeactivatedFetchedMap = new HashMap<>(); private static final int REFRESH_INTERVAL = 60 * 2; // 2 minutes. 
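+ // These per-account maps (lastDeactivatedFetchedMap above, deactivatedCollectionsMap below) are keyed by Context.accountId, so concurrent accounts keep separate views; an entry is refreshed lazily once it is older than REFRESH_INTERVAL seconds.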
- private static Set deactivatedCollections = new HashSet<>(); + private static Map> deactivatedCollectionsMap = new HashMap<>(); public static Set getDeactivated() { - if ((lastDeactivatedFetched + REFRESH_INTERVAL) >= Context.now()) { - return deactivatedCollections; + int accountId = Context.accountId.get(); + if (lastDeactivatedFetchedMap.containsKey(accountId) + && (lastDeactivatedFetchedMap.get(accountId) + REFRESH_INTERVAL) >= Context.now() + && deactivatedCollectionsMap.containsKey(accountId)) { + return deactivatedCollectionsMap.get(accountId); } - deactivatedCollections = getDeactivatedLatest(); - lastDeactivatedFetched = Context.now(); - return deactivatedCollections; + deactivatedCollectionsMap.put(accountId, getDeactivatedLatest()); + lastDeactivatedFetchedMap.put(accountId, Context.now()); + return deactivatedCollectionsMap.get(accountId); } public static Set getDeactivatedLatest(){ List deactivated = ApiCollectionsDao.instance - .findAll(Filters.eq(ApiCollection._DEACTIVATED, true)); + .findAll(Filters.eq(ApiCollection._DEACTIVATED, true), Projections.include(Constants.ID)); Set deactivatedIds = new HashSet<>( deactivated.stream().map(apiCollection -> apiCollection.getId()).collect(Collectors.toList())); diff --git a/libs/utils/src/main/java/com/akto/utils/CustomAuthUtil.java b/libs/utils/src/main/java/com/akto/utils/CustomAuthUtil.java new file mode 100644 index 0000000000..5e579307b1 --- /dev/null +++ b/libs/utils/src/main/java/com/akto/utils/CustomAuthUtil.java @@ -0,0 +1,189 @@ +package com.akto.utils; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.ArrayList; +import java.util.Collections; + +import com.mongodb.BasicDBObject; +import com.mongodb.client.model.*; +import org.bson.conversions.Bson; + +import com.akto.dao.ApiInfoDao; +import com.akto.dao.SampleDataDao; +import com.akto.dao.SingleTypeInfoDao; +import com.akto.dto.ApiInfo; +import com.akto.dto.CustomAuthType; +import com.akto.dto.HttpRequestParams; +import com.akto.dto.HttpResponseParams; +import com.akto.dto.traffic.SampleData; +import com.akto.dto.type.SingleTypeInfo; +import com.akto.log.LoggerMaker; +import com.akto.log.LoggerMaker.LogDb; +import com.akto.runtime.parser.SampleParser; +import com.akto.runtime.policies.AuthPolicy; +import com.akto.util.Constants; +import com.google.gson.Gson; + +import static com.akto.dto.ApiInfo.ALL_AUTH_TYPES_FOUND; + +public class CustomAuthUtil { + + private static final LoggerMaker loggerMaker = new LoggerMaker(CustomAuthUtil.class, LogDb.DASHBOARD); + + public static Bson getFilters(ApiInfo apiInfo) { + return Filters.and( + Filters.eq(SingleTypeInfo._URL, apiInfo.getId().getUrl()), + Filters.eq(SingleTypeInfo._METHOD, apiInfo.getId().getMethod().name()), + Filters.eq(SingleTypeInfo._RESPONSE_CODE, -1), + Filters.eq(SingleTypeInfo._API_COLLECTION_ID, apiInfo.getId().getApiCollectionId())); + } + + private static Set unauthenticatedTypes = new HashSet<>(Collections.singletonList(ApiInfo.AuthType.UNAUTHENTICATED)); + + final static Gson gson = new Gson(); + + private static HttpResponseParams createResponseParamsFromSTI(List list) { + + HttpResponseParams responseParams = new HttpResponseParams(); + HttpRequestParams requestParams = new HttpRequestParams(); + Map> headers = new HashMap<>(); + Map payloadKeys = new HashMap<>(); + for(SingleTypeInfo sti: list){ + if(sti.isIsHeader()){ + List values = new ArrayList<>(); + if(sti.getValues()!=null && 
sti.getValues().getElements()!=null){ + values = new ArrayList<>(sti.getValues().getElements()); + } + headers.put(sti.getParam(), values); + } else if(!sti.getIsUrlParam()) { + payloadKeys.put(sti.getParam(), ""); + } + } + String payloadJsonString = "{}"; + try { + payloadJsonString = gson.toJson(payloadKeys); + } catch(Exception e){ + payloadJsonString = "{}"; + } + requestParams.setHeaders(headers); + requestParams.setPayload(payloadJsonString); + responseParams.requestParams = requestParams; + return responseParams; + } + + public static List> calcAuth(List apiInfos, List customAuthTypes, boolean printLogs){ + List> apiInfosUpdates = new ArrayList<>(); + if (customAuthTypes == null) { + customAuthTypes = new ArrayList<>(); + } + loggerMaker.infoAndAddToDb("Read " + apiInfos.size() + " api infos for custom auth types " + customAuthTypes.size(), LogDb.DASHBOARD); + for (ApiInfo apiInfo : apiInfos) { + + if (apiInfo.getId().getApiCollectionId() == -86954493) { + loggerMaker.infoAndAddToDb("initial auth types " + apiInfo.getId().getUrl() + " authtypes: " + apiInfo.getAllAuthTypesFound(), LogDb.DASHBOARD); + } + + Set> authTypes = apiInfo.getAllAuthTypesFound(); + authTypes.remove(new HashSet<>()); + authTypes.remove(unauthenticatedTypes); + + SampleData sampleData = SampleDataDao.instance.fetchAllSampleDataForApi( + apiInfo.getId().getApiCollectionId(), + apiInfo.getId().getUrl(), apiInfo.getId().getMethod()); + boolean sampleProcessed = false; + ApiInfo.ApiInfoKey id = apiInfo.getId(); + if (sampleData != null && sampleData.getSamples() != null && !sampleData.getSamples().isEmpty()) { + if (printLogs && (id.getApiCollectionId() == 1991121043 || id.getApiCollectionId() == -1134993740)) { + loggerMaker.infoAndAddToDb("Found sample data for " + sampleData.getId().toString() ,LogDb.DASHBOARD); + } + for (String sample : sampleData.getSamples()) { + try { + HttpResponseParams httpResponseParams = SampleParser.parseSampleMessage(sample); + AuthPolicy.findAuthType(httpResponseParams, apiInfo, null, customAuthTypes); + if (id.getApiCollectionId() == -86954493) { + loggerMaker.infoAndAddToDb("auth types sample data processed: url " + id.getUrl() + " authtypes: " + apiInfo.getAllAuthTypesFound(), LogDb.DASHBOARD); + } + sampleProcessed = true; + } catch (Exception e) { + loggerMaker.errorAndAddToDb(e, "Unable to parse sample data for custom auth setup job"); + } + } + } + + if (!sampleProcessed) { + continue; + // List list = SingleTypeInfoDao.instance.findAll(getFilters(apiInfo)); + // try { + // if(list!=null && !list.isEmpty()){ + // HttpResponseParams httpResponseParams = createResponseParamsFromSTI(list); + // if (printLogs && (id.getApiCollectionId() == 1991121043 || id.getApiCollectionId() == -1134993740)) { + // loggerMaker.infoAndAddToDb("Headers for " + apiInfo.getId().toString() + ": " + httpResponseParams.requestParams.getHeaders() ,LogDb.DASHBOARD); + // } + // AuthPolicy.findAuthType(httpResponseParams, apiInfo, null, customAuthTypes); + // if (id.getApiCollectionId() == -86954493) { + // loggerMaker.infoAndAddToDb("auth types sample data unprocessed: url " + id.getUrl() + " authtypes: " + apiInfo.getAllAuthTypesFound(), LogDb.DASHBOARD); + // } + // } + // } catch (Exception e) { + // loggerMaker.errorAndAddToDb(e, "Unable to parse STIs for custom auth setup job"); + // } + } + + if (printLogs && (id.getApiCollectionId() == 1991121043 || id.getApiCollectionId() == -1134993740)) { + loggerMaker.infoAndAddToDb("auth types for endpoint post processing " + apiInfo.getId().getUrl() + " " 
+ apiInfo.getId().getMethod() + " : " + apiInfo.getAllAuthTypesFound()); + } + + UpdateOneModel update = new UpdateOneModel<>( + ApiInfoDao.getFilter(apiInfo.getId()), + Updates.set(ALL_AUTH_TYPES_FOUND, apiInfo.getAllAuthTypesFound()), + new UpdateOptions().upsert(false)); + apiInfosUpdates.add(update); + + } + loggerMaker.infoAndAddToDb("Finished processing " + apiInfos.size() + " api infos for custom auth type", LogDb.DASHBOARD); + return apiInfosUpdates; + + } + + public static void customAuthTypeUtil(List customAuthTypes) { + + List> apiInfosUpdates = new ArrayList<>(); + + int skip = 0; + int limit = 1000; + boolean fetchMore = false; + do { + fetchMore = false; + List apiInfos = ApiInfoDao.instance.findAll(new BasicDBObject(), skip, limit, + Sorts.descending(Constants.ID)); + + apiInfosUpdates.addAll(calcAuth(apiInfos, customAuthTypes, false)); + + if (apiInfos.size() == limit) { + skip += limit; + fetchMore = true; + } + + } while (fetchMore); + + if (apiInfosUpdates.size() > 0) { + ApiInfoDao.instance.getMCollection().bulkWrite(apiInfosUpdates); + } + } + + public static void resetAllCustomAuthTypes() { + + /* + * 1. remove custom auth type from all entries. + * 2. remove unauthenticated auth type from all entries since on reset, + * auth type should be calculated again. + */ + ApiInfoDao.instance.updateMany(new BasicDBObject(), + Updates.pull(ALL_AUTH_TYPES_FOUND + ".$[]", new BasicDBObject().append("$in", + new String[] { ApiInfo.AuthType.CUSTOM.name(), ApiInfo.AuthType.UNAUTHENTICATED.name() }))); + } +}
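A minimal standalone sketch of the $pull update shape used in resetAllCustomAuthTypes() follows; the class name is hypothetical, and the literal strings stand in for the ALL_AUTH_TYPES_FOUND constant and the ApiInfo.AuthType enum names, assuming the constant resolves to the allAuthTypesFound field.

import org.bson.conversions.Bson;

import com.mongodb.BasicDBObject;
import com.mongodb.client.model.Updates;

// Standalone sketch of the update shape built in resetAllCustomAuthTypes() above.
public class ResetAuthTypesSketch {
    public static void main(String[] args) {
        // The "$[]" all-positional operator targets every element of the allAuthTypesFound
        // array, so the $pull removes CUSTOM and UNAUTHENTICATED from each nested auth-type set.
        Bson update = Updates.pull("allAuthTypesFound.$[]",
                new BasicDBObject().append("$in", new String[] { "CUSTOM", "UNAUTHENTICATED" }));
        // Equivalent update document (roughly):
        // { "$pull": { "allAuthTypesFound.$[]": { "$in": ["CUSTOM", "UNAUTHENTICATED"] } } }
        System.out.println(update); // prints the driver's description of the update
    }
}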