
Commit 11a254f
Merge pull request #1797 from akto-api-security/hotfix/fix_rbac
collection wise rbac
notshivansh authored Dec 17, 2024
2 parents 1a9bd48 + 4f5ff2b
Showing 55 changed files with 1,505 additions and 397 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -21,3 +21,4 @@ https:
**/data-kafka-data
**/data-zoo-data
**/data-zoo-logs
+**/bin
@@ -8,11 +8,9 @@
import com.akto.util.Pair;
import org.bson.conversions.Bson;

-import com.akto.DaoInit;
import com.akto.action.observe.Utils;
import com.akto.dao.*;
import com.akto.billing.UsageMetricUtils;
-import com.akto.dao.billing.OrganizationsDao;
import com.akto.dao.context.Context;
import com.akto.dto.billing.FeatureAccess;
import com.akto.dto.usage.MetricTypes;
@@ -24,7 +22,7 @@
import com.akto.dto.ApiInfo.ApiInfoKey;
import com.akto.dto.testing.CustomTestingEndpoints;
import com.akto.dto.CollectionConditions.ConditionUtils;
-import com.akto.dto.billing.Organization;
+import com.akto.dto.rbac.UsersCollectionsList;
import com.akto.dto.type.SingleTypeInfo;
import com.akto.listener.RuntimeListener;
import com.akto.log.LoggerMaker;
@@ -40,16 +38,16 @@
import com.mongodb.client.model.Projections;
import com.mongodb.client.model.Updates;
import com.mongodb.BasicDBObject;
-import com.mongodb.ConnectionString;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import com.mongodb.client.model.Sorts;
import com.mongodb.client.model.UnwindOptions;
+import com.mongodb.client.model.UpdateOptions;
import com.opensymphony.xwork2.Action;

public class ApiCollectionsAction extends UserAction {

-private static final LoggerMaker loggerMaker = new LoggerMaker(ApiCollectionsAction.class);
+private static final LoggerMaker loggerMaker = new LoggerMaker(ApiCollectionsAction.class, LogDb.DASHBOARD);

List<ApiCollection> apiCollections = new ArrayList<>();
Map<Integer,Integer> testedEndpointsMaps = new HashMap<>();
@@ -173,6 +171,14 @@ public String fetchAllCollectionsBasic() {
Projections.computed(ApiCollection.URLS_COUNT, new BasicDBObject("$size", new BasicDBObject("$ifNull", Arrays.asList("$urls", Collections.emptyList())))),
Projections.include(ApiCollection.ID, ApiCollection.NAME, ApiCollection.HOST_NAME, ApiCollection._TYPE, ApiCollection.USER_ENV_TYPE, ApiCollection._DEACTIVATED,ApiCollection.START_TS, ApiCollection.AUTOMATED)
)));
+
+try {
+List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(Context.userId.get(), Context.accountId.get());
+if(collectionIds != null) {
+pipeLine.add(Aggregates.match(Filters.in(Constants.ID, collectionIds)));
+}
+} catch(Exception e){
+}
MongoCursor<BasicDBObject> cursor = ApiCollectionsDao.instance.getMCollection().aggregate(pipeLine, BasicDBObject.class).cursor();
while(cursor.hasNext()){
try {
@@ -260,6 +266,27 @@ public String createCollection() {
this.apiCollections = new ArrayList<>();
this.apiCollections.add(apiCollection);

+try {
+int userId = Context.userId.get();
+int accountId = Context.accountId.get();
+
+/*
+ * Since admin has all access, we don't update any collections for them.
+ */
+RBACDao.instance.getMCollection().updateOne(
+Filters.and(
+Filters.eq(RBAC.USER_ID, userId),
+Filters.eq(RBAC.ACCOUNT_ID, accountId),
+Filters.ne(RBAC.ROLE, RBAC.Role.ADMIN.getName())
+),
+Updates.addToSet(RBAC.API_COLLECTIONS_ID, apiCollection.getId()),
+new UpdateOptions().upsert(false)
+);
+
+UsersCollectionsList.deleteCollectionIdsFromCache(userId, accountId);
+} catch(Exception e){
+}
+
ActivitiesDao.instance.insertActivity("Collection created", "new Collection " + this.collectionName + " created");

return Action.SUCCESS.toUpperCase();
@@ -332,6 +359,13 @@ public String deleteMultipleCollections() {
ApiCollectionUsers.updateApiCollection(collection.getConditions(), collection.getId());
}

+try {
+int userId = Context.userId.get();
+int accountId = Context.accountId.get();
+UsersCollectionsList.deleteCollectionIdsFromCache(userId, accountId);
+} catch (Exception e) {
+}
+
if(hasApiGroups) {
addActionError("API groups cannot be deleted!");
return ERROR.toUpperCase();
@@ -572,6 +606,14 @@ public String fetchRiskScoreInfo(){
Map<Integer, Double> riskScoreMap = new HashMap<>();
List<Bson> pipeline = new ArrayList<>();

+try {
+List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(Context.userId.get(), Context.accountId.get());
+if(collectionIds != null) {
+pipeline.add(Aggregates.match(Filters.in(SingleTypeInfo._COLLECTION_IDS, collectionIds)));
+}
+} catch(Exception e){
+}
+
/*
* Use Unwind to unwind the collectionIds field resulting in a document for each collectionId in the collectionIds array
*/
@@ -715,9 +757,22 @@ public String updateEnvType(){
FindOneAndUpdateOptions updateOptions = new FindOneAndUpdateOptions();
updateOptions.upsert(false);

+/*
+ * User can only update collections which they have access to.
+ * so we remove entries which are not in the collections access list.
+ */
+try {
+List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(Context.userId.get(), Context.accountId.get());
+if(collectionIds != null) {
+apiCollectionIds.removeIf(apiCollectionId -> !collectionIds.contains(apiCollectionId));
+filter = Filters.in(Constants.ID, apiCollectionIds);
+}
+} catch(Exception e){
+}
+
UpdateResult result = ApiCollectionsDao.instance.getMCollection().updateMany(filter,
Updates.set(ApiCollection.USER_ENV_TYPE,envType)
-);;
+);
if(result == null){
return Action.ERROR.toUpperCase();
}
@@ -728,6 +783,38 @@
return Action.ERROR.toUpperCase();
}

+public Map<String, List<Integer>> userCollectionMap = new HashMap<>();
+
+public String updateUserCollections() {
+int accountId = Context.accountId.get();
+
+for(Map.Entry<String, List<Integer>> entry : userCollectionMap.entrySet()) {
+int userId = Integer.parseInt(entry.getKey());
+Set<Integer> apiCollections = new HashSet<>(entry.getValue());
+
+RBACDao.updateApiCollectionAccess(userId, accountId, apiCollections);
+UsersCollectionsList.deleteCollectionIdsFromCache(userId, accountId);
+}
+
+return SUCCESS.toUpperCase();
+}
+
+
+HashMap<Integer, List<Integer>> usersCollectionList;
+public String getAllUsersCollections() {
+int accountId = Context.accountId.get();
+this.usersCollectionList = RBACDao.instance.getAllUsersCollections(accountId);
+
+return SUCCESS.toUpperCase();
+}
+
+public void setUserCollectionMap(Map<String, List<Integer>> userCollectionMap) {
+this.userCollectionMap = userCollectionMap;
+}
+
+public HashMap<Integer, List<Integer>> getUsersCollectionList() {
+return this.usersCollectionList;
+}
public String editCollectionName() {
if(!isValidApiCollectionName()){
return ERROR.toUpperCase();
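The guard added to fetchAllCollectionsBasic, fetchRiskScoreInfo, and updateEnvType above is one recurring pattern: resolve the caller's permitted collection ids, then narrow the query only when a restriction list exists. A minimal sketch of that pattern as a shared helper — the helper name is hypothetical, and it assumes (as the null-checks above imply) that getCollectionsIdForUser returns null for unrestricted users such as admins:

    // Sketch only: the recurring RBAC guard from this commit, factored out.
    // A null list is treated as "no restriction", per the null-checks above.
    private static void addUserCollectionsMatch(List<Bson> pipeline, String fieldName) {
        try {
            List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(
                    Context.userId.get(), Context.accountId.get());
            if (collectionIds != null) {
                pipeline.add(Aggregates.match(Filters.in(fieldName, collectionIds)));
            }
        } catch (Exception e) {
            // The commit swallows failures here, so lookups fail open rather than block the query.
        }
    }

Call sites would pass Constants.ID when matching collection documents by _id, and SingleTypeInfo._COLLECTION_IDS when matching documents that carry a collectionIds array, mirroring the hunks above and below.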
@@ -6,6 +6,7 @@
import com.akto.dto.*;
import com.akto.dto.data_types.Conditions;
import com.akto.dto.data_types.Predicate;
+import com.akto.dto.rbac.UsersCollectionsList;
import com.akto.dto.traffic.Key;
import com.akto.dto.traffic.SampleData;
import com.akto.dto.type.SingleTypeInfo;
@@ -1043,7 +1044,15 @@ public String resetDataType() {
List<SingleTypeInfo.ParamId> idsToDelete = new ArrayList<>();
do {
idsToDelete = new ArrayList<>();
-cursor = SensitiveSampleDataDao.instance.getMCollection().find(filterSsdQ).projection(Projections.exclude(SensitiveSampleData.SAMPLE_DATA)).skip(currMarker).limit(BATCH_SIZE).cursor();
+Bson collectionFilter = Filters.empty();
+try {
+List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(Context.userId.get(), Context.accountId.get());
+if(collectionIds != null) {
+collectionFilter = Filters.in(SingleTypeInfo._COLLECTION_IDS, collectionIds);
+}
+} catch(Exception e){
+}
+cursor = SensitiveSampleDataDao.instance.getMCollection().find(Filters.and(filterSsdQ, collectionFilter)).projection(Projections.exclude(SensitiveSampleData.SAMPLE_DATA)).skip(currMarker).limit(BATCH_SIZE).cursor();
currMarker += BATCH_SIZE;
dataPoints = 0;
loggerMaker.infoAndAddToDb("processing batch: " + currMarker, LogDb.DASHBOARD);
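resetDataType uses the find() counterpart of the same guard: it starts from Filters.empty(), which matches every document, so Filters.and(filterSsdQ, collectionFilter) reduces to the original query for unrestricted users. The idiom in isolation, as a sketch (baseFilter stands in for filterSsdQ):

    // Sketch: a no-op default filter, narrowed only when a restriction list exists.
    Bson collectionFilter = Filters.empty();   // matches all documents
    List<Integer> ids = UsersCollectionsList.getCollectionsIdForUser(userId, accountId);
    if (ids != null) {
        collectionFilter = Filters.in(SingleTypeInfo._COLLECTION_IDS, ids);
    }
    Bson effectiveFilter = Filters.and(baseFilter, collectionFilter);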
52 changes: 10 additions & 42 deletions apps/dashboard/src/main/java/com/akto/action/DashboardAction.java
@@ -8,11 +8,12 @@
import com.akto.dao.billing.OrganizationsDao;
import com.akto.dto.*;
import com.akto.dto.billing.Organization;
+import com.akto.dto.rbac.UsersCollectionsList;
import com.akto.dto.test_run_findings.TestingRunIssues;
+import com.akto.dto.type.SingleTypeInfo;
import com.akto.listener.RuntimeListener;
import com.akto.util.enums.GlobalEnums;
import com.mongodb.client.model.*;
-import org.bouncycastle.util.test.Test;
import org.bson.conversions.Bson;

import com.akto.dao.context.Context;
@@ -29,7 +30,6 @@

public class DashboardAction extends UserAction {

-private Map<Integer,Integer> riskScoreCountMap = new HashMap<>();
private int startTimeStamp;
private int endTimeStamp;
private Map<Integer,List<IssueTrendType>> issuesTrendMap = new HashMap<>() ;
@@ -39,36 +39,7 @@ public class DashboardAction extends UserAction {
private Map<String,ConnectionInfo> integratedConnectionsInfo = new HashMap<>();
private String connectionSkipped;

-private static final LoggerMaker loggerMaker = new LoggerMaker(DashboardAction.class);
-
-private static boolean isBetween(int low, int high, double score){
-return (score >= low && score < high) ;
-}
-
-// function for getting number of api in between multiple ranges to show trend on dashboard pagecalculateRiskValueForSeverity
-public String fetchRiskScoreCountMap(){
-Map<Integer, Integer> riskScoreCounts = new HashMap<>();
-MongoCursor<ApiInfo> apiCursor = ApiInfoDao.instance.getMCollection().find().projection(Projections.include("_id", ApiInfo.RISK_SCORE)).cursor();
-while(apiCursor.hasNext()){
-try {
-ApiInfo apiInfo = apiCursor.next();
-float riskScore = apiInfo.getRiskScore();
-if (isBetween(0, 3, riskScore)) {
-riskScoreCounts.put(3, riskScoreCounts.getOrDefault(3,0) + 1);
-} else if (isBetween(3, 4, riskScore)) {
-riskScoreCounts.put(4, riskScoreCounts.getOrDefault(4,0) + 1);
-} else {
-riskScoreCounts.put(5, riskScoreCounts.getOrDefault(5,0) + 1);
-}
-}catch (Exception e) {
-loggerMaker.errorAndAddToDb("error in calculating risk score count " + e.toString(), LogDb.DASHBOARD);
-}
-}
-
-this.riskScoreCountMap = riskScoreCounts;
-
-return Action.SUCCESS.toUpperCase();
-}
+private static final LoggerMaker loggerMaker = new LoggerMaker(DashboardAction.class, LogDb.DASHBOARD);

Set<Integer> deactivatedCollections = UsageMetricCalculator.getDeactivated();

@@ -157,6 +128,13 @@ public String fetchCriticalIssuesTrend(){
List<Bson> pipeline = new ArrayList<>();
pipeline.add(Aggregates.match(issuesFilter));

+try {
+List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(Context.userId.get(), Context.accountId.get());
+if(collectionIds != null) {
+pipeline.add(Aggregates.match(Filters.in(SingleTypeInfo._COLLECTION_IDS, collectionIds)));
+}
+} catch(Exception e){
+}
pipeline.add(Aggregates.project(Projections.computed(dayOfYearFloat, new BasicDBObject("$divide", new Object[]{"$" + TestingRunIssues.CREATION_TIME, 86400}))));

pipeline.add(Aggregates.project(Projections.computed(dayOfYear, new BasicDBObject("$floor", new Object[]{"$" + dayOfYearFloat}))));
@@ -305,12 +283,6 @@ public String updateUsernameAndOrganization() {
return Action.SUCCESS.toUpperCase();
}

-private String userEmail;
-
-public Map<Integer, Integer> getRiskScoreCountMap() {
-return riskScoreCountMap;
-}
-
public int getStartTimeStamp() {
return startTimeStamp;
}
Expand Down Expand Up @@ -406,8 +378,4 @@ public String getOrganization() {
public void setOrganization(String organization) {
this.organization = organization;
}
-
-public void setUserEmail(String userEmail) {
-this.userEmail = userEmail;
-}
}
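In fetchCriticalIssuesTrend the RBAC guard lands as a second $match stage immediately after the issuesFilter match. MongoDB's pipeline optimizer coalesces consecutive $match stages, so the two-stage form the commit uses is equivalent to folding the guard into one combined filter — shown here only as a sketch of that equivalence, not as what the commit does:

    // Equivalent single-stage form (sketch); the commit appends a separate match instead.
    pipeline.add(Aggregates.match(Filters.and(
            issuesFilter,
            Filters.in(SingleTypeInfo._COLLECTION_IDS, collectionIds))));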
@@ -1,6 +1,8 @@
package com.akto.action;

import com.akto.dao.SingleTypeInfoDao;
+import com.akto.dao.context.Context;
+import com.akto.dto.rbac.UsersCollectionsList;
import com.akto.dto.type.SingleTypeInfo;
import com.akto.log.LoggerMaker;
import com.mongodb.client.MongoCursor;
@@ -33,6 +35,13 @@ public String fetchParamsStatus() {
String computedFieldName = "computedValue";

pipeline.add(Aggregates.match(Filters.gt(SingleTypeInfo._UNIQUE_COUNT,0)));
+try {
+List<Integer> collectionIds = UsersCollectionsList.getCollectionsIdForUser(Context.userId.get(), Context.accountId.get());
+if(collectionIds != null) {
+pipeline.add(Aggregates.match(Filters.in(SingleTypeInfo._COLLECTION_IDS, collectionIds)));
+}
+} catch(Exception e){
+}

Bson projections = Projections.fields(
Projections.include(
5 changes: 4 additions & 1 deletion apps/dashboard/src/main/java/com/akto/action/TeamAction.java
@@ -7,6 +7,7 @@
import com.akto.dto.PendingInviteCode;
import com.akto.dto.RBAC;
import com.akto.dto.RBAC.Role;
+import com.akto.dto.rbac.UsersCollectionsList;
import com.akto.dto.User;
import com.akto.log.LoggerMaker;
import com.akto.log.LoggerMaker.LogDb;
@@ -105,7 +106,6 @@ public String fetchTeamData() {
}
return SUCCESS.toUpperCase();
}
-
private enum ActionType {
REMOVE_USER,
UPDATE_USER_ROLE
@@ -161,6 +161,8 @@ public String performAction(ActionType action, String reqUserRole) {
RBACDao.instance.updateOne(
filterRbac,
Updates.set(RBAC.ROLE, Role.valueOf(reqUserRole)));
+RBACDao.instance.deleteUserEntryFromCache(new Pair<>(userDetails.getId(), accId));
+UsersCollectionsList.deleteCollectionIdsFromCache(userDetails.getId(), accId);
return Action.SUCCESS.toUpperCase();
}else{
addActionError("User doesn't have access to modify this role.");
@@ -180,6 +182,7 @@ public String performAction(ActionType action, String reqUserRole) {
break;
}
RBACDao.instance.deleteUserEntryFromCache(new Pair<>(userDetails.getId(), accId));
+UsersCollectionsList.deleteCollectionIdsFromCache(userDetails.getId(), accId);
return Action.SUCCESS.toUpperCase();
}

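Both branches of performAction now evict two caches together: the per-user RBAC entry and the user's cached collection-id list, so a role change or removal takes effect on the next request instead of lingering until a cache expiry. A sketch of the pairing as a helper (the helper name is illustrative, not part of the commit):

    // Sketch: evict both RBAC-related caches in one place.
    private static void evictRbacCaches(int userId, int accountId) {
        RBACDao.instance.deleteUserEntryFromCache(new Pair<>(userId, accountId));
        UsersCollectionsList.deleteCollectionIdsFromCache(userId, accountId);
    }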
4 changes: 4 additions & 0 deletions apps/dashboard/src/main/java/com/akto/action/UserAction.java
@@ -1,5 +1,6 @@
package com.akto.action;

+import com.akto.dao.context.Context;
import com.akto.dto.User;
import com.opensymphony.xwork2.ActionSupport;
import org.apache.struts2.interceptor.SessionAware;
@@ -17,6 +18,9 @@ public Map<String, Object> getSession() {
public void setSession(Map<String, Object> session) {
this.session = session;
this.user = (User)(session.get("user"));
+if (this.user != null) {
+Context.userId.set(this.user.getId());
+}
}

public User getSUser() {
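setSession now publishes the logged-in user's id through Context, which every RBAC lookup above reads back via Context.userId.get(). The Context class itself is outside this diff; a minimal sketch of the holder its usage implies, assuming plain ThreadLocals:

    // Sketch only: a holder consistent with the Context.userId / Context.accountId
    // usage in this commit. The real com.akto.dao.context.Context is not shown here.
    public class Context {
        public static final ThreadLocal<Integer> userId = new ThreadLocal<>();
        public static final ThreadLocal<Integer> accountId = new ThreadLocal<>();
    }

One caveat for thread-local designs like this: on pooled request threads the value should be reset per request, otherwise a stale user id can leak into the next request handled by the same thread.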
@@ -71,11 +71,15 @@ public String fetchAktoGptConfig(){
if(aktoGptConfig == null) {
aktoGptConfig = upsertAktoConfig(apiCollectionId, DEFAULT_STATE);
}
-String collectionName = ApiCollectionsDao.instance.findOne(new BasicDBObject("_id", apiCollectionId)).getName();
-currentState = Collections.singletonList(new BasicDBObject("id", aktoGptConfig.getId())
-.append("state", aktoGptConfig.getState().toString())
-.append("collectionName", collectionName != null ? collectionName : String.valueOf(apiCollectionId)));
-logger.debug("Fetching AktoGptConfig for collectionId: {}, {}", apiCollectionId, currentState);
+ApiCollection apiCollection = ApiCollectionsDao.instance.findOne(new BasicDBObject("_id", apiCollectionId));
+if (apiCollection != null) {
+String collectionName = apiCollection.getName();
+currentState = Collections.singletonList(new BasicDBObject("id", aktoGptConfig.getId())
+.append("state", aktoGptConfig.getState().toString())
+.append("collectionName",
+collectionName != null ? collectionName : String.valueOf(apiCollectionId)));
+logger.debug("Fetching AktoGptConfig for collectionId: {}, {}", apiCollectionId, currentState);
+}
}
return SUCCESS.toUpperCase();
}
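The old code dereferenced findOne(...) directly, so a collection id with no matching document threw a NullPointerException before currentState was built; the patch fetches the document once and guards it. The same guard in compact form, as a sketch:

    // Sketch: behavior-equivalent guard; skips state-building when the collection is gone.
    ApiCollection apiCollection = ApiCollectionsDao.instance.findOne(new BasicDBObject("_id", apiCollectionId));
    if (apiCollection != null) {
        String name = apiCollection.getName() != null
                ? apiCollection.getName()
                : String.valueOf(apiCollectionId);
        // ... build currentState and log, as in the patched block above
    }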