Merge branch 'master' into feature/prioritize_templates_job
ankush-jain-akto committed Jan 15, 2025
2 parents 268020b + f79f9cd commit 21c9f83
Showing 41 changed files with 2,351 additions and 173 deletions.
5 changes: 2 additions & 3 deletions .github/workflows/prod.yml
@@ -98,7 +98,6 @@ jobs:
ECR_REPOSITORY: akto-api-security
IMAGE_TAG_1: local
IMAGE_TAG_2: ${{ github.event.inputs.release_version }}_local
IMAGE_TAG_3: latest
run: |
docker buildx create --use
# Build a docker container and push it to DockerHub
@@ -134,8 +133,8 @@ jobs:
fi
if [[ "${{ github.event.inputs.threat_detection_backend}}" == "true" ]]; then
cd apps/threat-detection-backend
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-threat-detection-backend:$IMAGE_TAG . --push
cd ../threat-detection-backend
docker buildx build --platform linux/arm64/v8,linux/amd64 -t $ECR_REGISTRY/akto-threat-detection-backend:$IMAGE_TAG_1 -t $ECR_REGISTRY/akto-threat-detection-backend:$IMAGE_TAG_2 . --push
fi
- name: Configure AWS Credentials for ECR
uses: aws-actions/configure-aws-credentials@v1
8 changes: 3 additions & 5 deletions .github/workflows/staging.yml
@@ -4,15 +4,13 @@ name: Staging
on:
push:
branches: [master]
pull_request:
pull_request_review:
types: [ submitted ]
workflow_dispatch:

concurrency:
group: "staging${{ github.head_ref }}"
cancel-in-progress: true

jobs:
build:
if: (github.event.pull_request.base.ref == 'master' && github.event.review.state == 'approved') || (github.event_name == 'workflow_dispatch')
runs-on: ubuntu-22.04
outputs:
IMAGE_TAG: ${{ steps.docker_tag.outputs.IMAGE_TAG }}
@@ -234,6 +234,7 @@ public static void executeMeta1(Utility utility, User user, HttpServletRequest r
userDetails.append("expired", organization.checkExpirationWithAktoSync());
userDetails.append("hotjarSiteId", organization.getHotjarSiteId());
userDetails.append("planType", organization.getplanType());
userDetails.append("trialMsg", organization.gettrialMsg());
}

if (versions.length > 2) {
63 changes: 62 additions & 1 deletion apps/dashboard/src/main/java/com/akto/action/ReportAction.java
@@ -1,5 +1,6 @@
package com.akto.action;

import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;
@@ -10,6 +11,11 @@
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import com.akto.dao.testing.sources.TestReportsDao;
import com.akto.dto.testing.sources.TestReports;
import com.mongodb.MongoCommandException;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Updates;
import org.apache.struts2.ServletActionContext;
import org.bson.types.ObjectId;
import org.json.JSONObject;
@@ -32,10 +38,45 @@ public class ReportAction extends UserAction {
private String reportUrl;
private String pdf;
private String status;
private boolean firstPollRequest;

private static final LoggerMaker loggerMaker = new LoggerMaker(ReportAction.class);

public String downloadReportPDF() {
if(reportUrl == null || reportUrl.isEmpty()) {
status = "ERROR";
addActionError("Report URL cannot be empty");
return ERROR.toUpperCase();
}

String reportUrlId;
try {
String path = new URL(reportUrl).getPath();
String[] segments = path.split("/");
reportUrlId = segments[segments.length - 1];
} catch (Exception e) {
status = "ERROR";
addActionError("Report URL cannot be empty");
return ERROR.toUpperCase();
}

if(!ObjectId.isValid(reportUrlId)) {
status = "ERROR";
addActionError("Report URL is invalid");
return ERROR.toUpperCase();
}

ObjectId reportUrlIdObj = new ObjectId(reportUrlId);

if(firstPollRequest) {
TestReports testReport = TestReportsDao.instance.findOne(Filters.eq("_id", reportUrlIdObj));
if(testReport != null && (testReport.getPdfReportString() != null && !testReport.getPdfReportString().isEmpty())) {
status = "COMPLETED";
pdf = testReport.getPdfReportString();
return SUCCESS.toUpperCase();
}
}

if (reportId == null) {
// Initiate PDF generation

@@ -89,6 +130,22 @@ public String downloadReportPDF() {
if (status.equals("COMPLETED")) {
loggerMaker.infoAndAddToDb("Pdf download status for report id - " + reportId + " completed. Attaching pdf in response ", LogDb.DASHBOARD);
pdf = node.get("base64PDF").textValue();
try {
TestReportsDao.instance.updateOne(Filters.eq("_id", reportUrlIdObj), Updates.set(TestReports.PDF_REPORT_STRING, pdf));
} catch(Exception e) {
loggerMaker.errorAndAddToDb("Error: " + e.getMessage() + ", while updating report binary for reportId: " + reportId, LogDb.DASHBOARD);
if (e instanceof MongoCommandException) {
MongoCommandException mongoException = (MongoCommandException) e;
if (mongoException.getCode() == 17420) {
addActionError("The report is too large to save. Please reduce its size and try again.");
} else {
addActionError("A database error occurred while saving the report. Try again later.");
}
} else {
addActionError("An error occurred while updating the report in DB. Please try again.");
}
status = "ERROR";
}
}
} catch (Exception e) {
loggerMaker.errorAndAddToDb(e, "Error while polling pdf download for report id - " + reportId, LogDb.DASHBOARD);
@@ -146,4 +203,8 @@ public String getStatus() {
public void setStatus(String status) {
this.status = status;
}

public void setFirstPollRequest(boolean firstPollRequest) {
this.firstPollRequest = firstPollRequest;
}
}
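The new downloadReportPDF flow first extracts the trailing ObjectId from the report URL and, on the first poll, serves a previously cached PDF from TestReportsDao before falling back to generation. A minimal sketch of that URL-parsing step, written as a standalone helper for illustration (the helper class itself is an assumption, not part of this commit):

import java.net.URL;
import org.bson.types.ObjectId;

public class ReportUrlParser {

    // Returns the trailing path segment as an ObjectId, or null when the URL is
    // malformed or the segment is not a valid ObjectId.
    public static ObjectId extractReportId(String reportUrl) {
        if (reportUrl == null || reportUrl.isEmpty()) {
            return null;
        }
        try {
            String path = new URL(reportUrl).getPath();       // e.g. "/report/65a1f0c2e4b0a1b2c3d4e5f6" (hypothetical)
            String[] segments = path.split("/");
            String candidate = segments[segments.length - 1]; // last segment carries the report id
            return ObjectId.isValid(candidate) ? new ObjectId(candidate) : null;
        } catch (Exception e) {
            return null;                                      // new URL(...) throws on malformed input
        }
    }
}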
24 changes: 21 additions & 3 deletions apps/dashboard/src/main/java/com/akto/action/SignupAction.java
@@ -13,6 +13,7 @@
import com.akto.notifications.slack.NewUserJoiningAlert;
import com.akto.notifications.slack.SlackAlerts;
import com.akto.notifications.slack.SlackSender;
import com.akto.usage.UsageMetricCalculator;
import com.akto.util.http_request.CustomHttpRequest;
import com.akto.utils.Auth0;
import com.akto.utils.GithubLogin;
@@ -548,9 +549,14 @@ public String registerViaOkta() throws IOException{
String username = userInfo.get("preferred_username").toString();

SignupInfo.OktaSignupInfo oktaSignupInfo= new SignupInfo.OktaSignupInfo(accessToken, username);

RBAC.Role defaultRole = RBAC.Role.MEMBER;
if(UsageMetricCalculator.isRbacFeatureAvailable(accountId)){
defaultRole = RBAC.Role.GUEST;
}

shouldLogin = "true";
createUserAndRedirect(email, username, oktaSignupInfo, accountId, Config.ConfigType.OKTA.toString(), RBAC.Role.MEMBER);
createUserAndRedirect(email, username, oktaSignupInfo, accountId, Config.ConfigType.OKTA.toString(), defaultRole);
code = "";
} catch (Exception e) {
loggerMaker.errorAndAddToDb("Error while signing in via okta sso \n" + e.getMessage(), LogDb.DASHBOARD);
@@ -659,7 +665,13 @@ public String registerViaAzure() throws Exception{
shouldLogin = "true";
logger.info("Successful signing with Azure Idp for: "+ useremail);
SignupInfo.SamlSsoSignupInfo signUpInfo = new SignupInfo.SamlSsoSignupInfo(username, useremail, Config.ConfigType.AZURE);
createUserAndRedirect(useremail, username, signUpInfo, this.accountId, Config.ConfigType.AZURE.toString(), RBAC.Role.MEMBER);

RBAC.Role defaultRole = RBAC.Role.MEMBER;
if(UsageMetricCalculator.isRbacFeatureAvailable(this.accountId)){
defaultRole = RBAC.Role.GUEST;
}

createUserAndRedirect(useremail, username, signUpInfo, this.accountId, Config.ConfigType.AZURE.toString(), defaultRole);
} catch (Exception e1) {
loggerMaker.errorAndAddToDb("Error while signing in via azure sso \n" + e1.getMessage(), LogDb.DASHBOARD);
servletResponse.sendRedirect("/login");
@@ -708,7 +720,13 @@ public String registerViaGoogleSamlSso() throws IOException{

shouldLogin = "true";
SignupInfo.SamlSsoSignupInfo signUpInfo = new SignupInfo.SamlSsoSignupInfo(username, userEmail, Config.ConfigType.GOOGLE_SAML);
createUserAndRedirect(userEmail, username, signUpInfo, this.accountId, Config.ConfigType.GOOGLE_SAML.toString(), RBAC.Role.MEMBER);

RBAC.Role defaultRole = RBAC.Role.MEMBER;
if(UsageMetricCalculator.isRbacFeatureAvailable(this.accountId)){
defaultRole = RBAC.Role.GUEST;
}

createUserAndRedirect(userEmail, username, signUpInfo, this.accountId, Config.ConfigType.GOOGLE_SAML.toString(), defaultRole);
} catch (Exception e1) {
loggerMaker.errorAndAddToDb("Error while signing in via google workspace sso \n" + e1.getMessage(), LogDb.DASHBOARD);
servletResponse.sendRedirect("/login");
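The same default-role selection now appears in the Okta, Azure and Google SAML flows: new SSO users get MEMBER unless the account has the RBAC feature available, in which case they default to GUEST. A hedged sketch of that check pulled into one place (the helper class, the RBAC import path, and the int account id are assumptions based on the diff, not part of this commit):

import com.akto.dto.RBAC;
import com.akto.usage.UsageMetricCalculator;

public class DefaultSsoRole {

    // GUEST when the RBAC feature is available for the account, MEMBER otherwise.
    public static RBAC.Role resolve(int accountId) {
        return UsageMetricCalculator.isRbacFeatureAvailable(accountId)
                ? RBAC.Role.GUEST
                : RBAC.Role.MEMBER;
    }
}

Each createUserAndRedirect call could then pass DefaultSsoRole.resolve(accountId) instead of repeating the branch in every SSO handler.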
@@ -63,7 +63,7 @@ public String fetchTeamData() {
for(Object obj: users) {
BasicDBObject userObj = (BasicDBObject) obj;
RBAC rbac = userToRBAC.get(userObj.getInt("id"));
String status = rbac == null ? Role.MEMBER.getName() : rbac.getRole().getName();
String status = (rbac == null || rbac.getRole() == null) ? Role.MEMBER.getName() : rbac.getRole().getName();
userObj.append("role", status);
try {
String login = userObj.getString(User.LOGIN);
@@ -614,7 +614,7 @@ private Map<String, Integer> getCountMapForQueryMode(ObjectId testingRunResultSu
Map<String, Integer> resultantMap = new HashMap<>();

List<Bson> filterList = prepareTestRunResultsFilters(testingRunResultSummaryId, queryMode);
int count = (int) TestingRunResultDao.instance.count(Filters.and(filterList));
int count = VulnerableTestingRunResultDao.instance.countFromDb(Filters.and(filterList), queryMode.equals(QueryMode.VULNERABLE));
resultantMap.put(queryMode.toString(), count);

return resultantMap;
@@ -720,8 +720,8 @@ public String fetchTestingRunResults() {

timeNow = Context.now();
Bson filters = testingRunResultFilters.isEmpty() ? Filters.empty() : Filters.and(testingRunResultFilters);
this.testingRunResults = TestingRunResultDao.instance
.fetchLatestTestingRunResultWithCustomAggregations(filters, pageLimit, skip, sortStage);
this.testingRunResults = VulnerableTestingRunResultDao.instance
.fetchLatestTestingRunResultWithCustomAggregations(filters, pageLimit, skip, sortStage, testingRunResultSummaryId, queryMode.equals(QueryMode.VULNERABLE));
loggerMaker.infoAndAddToDb("[" + (Context.now() - timeNow) + "] Fetched testing run results of size: " + testingRunResults.size(), LogDb.DASHBOARD);

timeNow = Context.now();
@@ -778,13 +778,25 @@ public String fetchVulnerableTestRunResults() {
try {
testingRunResultSummaryId = new ObjectId(testingRunResultSummaryHexId);
Bson filterForReport = com.akto.action.testing.Utils.createFiltersForTestingReport(reportFilterList);
boolean isStoredInVulnerableCollection = VulnerableTestingRunResultDao.instance.isStoredInVulnerableCollection(testingRunResultSummaryId, true);
Bson filters = Filters.and(
Filters.eq(TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, testingRunResultSummaryId),
Filters.eq(TestingRunResult.VULNERABLE, true),
filterForReport
);
List<TestingRunResult> testingRunResultList = new ArrayList<>();
if(isStoredInVulnerableCollection){
filters = Filters.and(
Filters.eq(TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, testingRunResultSummaryId),
filterForReport
);
testingRunResultList = VulnerableTestingRunResultDao.instance.findAll(filters, skip, 50, null);
}else{
testingRunResultList = TestingRunResultDao.instance.findAll(filters, skip, 50, null);
}


List<TestingRunResult> testingRunResultList = TestingRunResultDao.instance.findAll(filters, skip, 50, null);

// Map<String, String> sampleDataVsCurlMap = new HashMap<>();
// for (TestingRunResult runResult: testingRunResultList) {
// WorkflowTest workflowTest = runResult.getWorkflowTest();
@@ -848,7 +860,7 @@ public static String getNodeResultLastMessage(String message) {

public String fetchTestRunResultDetails() {
ObjectId testingRunResultId = new ObjectId(testingRunResultHexId);
this.testingRunResult = TestingRunResultDao.instance.findOne("_id", testingRunResultId);
this.testingRunResult = VulnerableTestingRunResultDao.instance.findOneWithComparison(Filters.eq(Constants.ID, testingRunResultId), null);
List<GenericTestResult> runResults = new ArrayList<>();

for (GenericTestResult testResult: this.testingRunResult.getTestResults()) {
@@ -868,7 +880,7 @@ public String fetchTestRunResultDetails() {

public String fetchIssueFromTestRunResultDetails() {
ObjectId testingRunResultId = new ObjectId(testingRunResultHexId);
TestingRunResult result = TestingRunResultDao.instance.findOne(Constants.ID, testingRunResultId);
TestingRunResult result = VulnerableTestingRunResultDao.instance.findOneWithComparison(Filters.eq(Constants.ID, testingRunResultId), null);
try {
if (result.isVulnerable()) {
// name = category
@@ -1132,12 +1144,14 @@ public void run() {
Context.accountId.set(accountId);
try {
ObjectId summaryObjectId = new ObjectId(testingRunResultSummaryHexId);
List<TestingRunResult> testingRunResults = TestingRunResultDao.instance.findAll(
boolean isStoredInVulnerableCollection = VulnerableTestingRunResultDao.instance.isStoredInVulnerableCollection(summaryObjectId, true);
List<TestingRunResult> testingRunResults = VulnerableTestingRunResultDao.instance.findAll(
Filters.and(
Filters.eq(TestingRunResult.TEST_RUN_RESULT_SUMMARY_ID, summaryObjectId),
vulnerableFilter
),
Projections.include(TestingRunResult.API_INFO_KEY, TestingRunResult.TEST_SUB_TYPE)
Projections.include(TestingRunResult.API_INFO_KEY, TestingRunResult.TEST_SUB_TYPE),
isStoredInVulnerableCollection
);

if(testingRunResults.isEmpty()){
@@ -1182,10 +1196,18 @@ public void run() {
);

// update testing run results, by setting them isIgnored true
TestingRunResultDao.instance.updateMany(
Filters.in(Constants.ID, ignoredResults),
Updates.set(TestingRunResult.IS_IGNORED_RESULT, true)
);
if(isStoredInVulnerableCollection){
VulnerableTestingRunResultDao.instance.updateMany(
Filters.in(Constants.ID, ignoredResults),
Updates.set(TestingRunResult.IS_IGNORED_RESULT, true)
);
}else{
TestingRunResultDao.instance.updateMany(
Filters.in(Constants.ID, ignoredResults),
Updates.set(TestingRunResult.IS_IGNORED_RESULT, true)
);
}

} catch (Exception e) {
e.printStackTrace();
}
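Several of the read paths above now branch on whether a summary's results were written to the vulnerable-results collection, via isStoredInVulnerableCollection, and query VulnerableTestingRunResultDao or TestingRunResultDao accordingly. A minimal sketch of that routing pattern (the wrapper class and the DAO import paths are assumptions; the method calls and signatures are taken from the diff):

import java.util.List;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import com.akto.dao.testing.TestingRunResultDao;
import com.akto.dao.testing.VulnerableTestingRunResultDao;
import com.akto.dto.testing.TestingRunResult;

public class TestResultReader {

    // Reads a page of results for a summary, preferring the vulnerable-results
    // collection when that is where the summary's results were stored.
    public static List<TestingRunResult> fetchPage(ObjectId summaryId, Bson filters, int skip, int limit) {
        boolean inVulnerableCollection =
                VulnerableTestingRunResultDao.instance.isStoredInVulnerableCollection(summaryId, true);
        if (inVulnerableCollection) {
            return VulnerableTestingRunResultDao.instance.findAll(filters, skip, limit, null);
        }
        return TestingRunResultDao.instance.findAll(filters, skip, limit, null);
    }
}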
@@ -0,0 +1,53 @@
package com.akto.action.testing;

import org.apache.commons.lang3.StringUtils;

import com.akto.action.UserAction;
import com.akto.dao.testing.RemediationsDao;
import com.akto.dto.testing.Remediation;
import com.akto.util.Constants;

public class TestTemplatesAction extends UserAction {

public String execute(){
throw new IllegalStateException("Not implemented");
}

String testId;
String remediation;
public String fetchRemediationInfo() {

if (StringUtils.isEmpty(testId)) {
addActionError("testId is empty");
return ERROR.toUpperCase();
}

Remediation remediationObj = RemediationsDao.instance.findOne(Constants.ID, testId);

if (remediationObj == null) {
this.remediation = "";
} else {
this.remediation = remediationObj.getRemediationText();
}

return SUCCESS.toUpperCase();
}

public String getTestId() {
return this.testId;
}

public void setTestId(String testId) {
this.testId = testId;
}

public String getRemediation() {
return this.remediation;
}

public void setRemediation(String remediation) {
this.remediation = remediation;
}


}
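The new TestTemplatesAction simply looks up a Remediation document by test id and exposes its text. A hedged usage sketch, invoking the action directly the way a unit test might (the test id is hypothetical, and an initialized DAO layer with a live Mongo connection and account context is assumed):

import com.akto.action.testing.TestTemplatesAction;

public class TestTemplatesActionExample {

    public static void main(String[] args) {
        // Assumes the DAO/account context has been set up elsewhere;
        // without a live Mongo connection the DAO lookup below will fail.
        TestTemplatesAction action = new TestTemplatesAction();
        action.setTestId("REMOVE_TOKENS");              // hypothetical test id
        String outcome = action.fetchRemediationInfo(); // "SUCCESS" or "ERROR"
        System.out.println(outcome + ": " + action.getRemediation());
    }
}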