diff --git a/.github/tvarit/conf/prod/grafana.ini.template b/.github/tvarit/conf/prod/grafana.ini.template
index f12e0ec9ff..6ffac191aa 100644
--- a/.github/tvarit/conf/prod/grafana.ini.template
+++ b/.github/tvarit/conf/prod/grafana.ini.template
@@ -115,6 +115,20 @@ password = """"""
from_address =
from_name =
+#################################### Azure AD OAuth #######################
+[auth.azuread]
+name = Maxion Login
+enabled =
+;allow_sign_up = false
+client_id =
+client_secret =
+scopes = openid email profile
+auth_url = https://login.microsoftonline.com/58288272-c24c-4c0c-bc60-dc0cbadd0866/oauth2/v2.0/authorize
+token_url = https://login.microsoftonline.com/58288272-c24c-4c0c-bc60-dc0cbadd0866/oauth2/v2.0/token
+skip_org_role_sync = true
+;allowed_domains =
+;allowed_groups =
+
#################################### Unified Alerting ####################
[unified_alerting]
#Enable the Unified Alerting sub-system and interface. When enabled we'll migrate all of your alert rules and notification channels to the new system. New alert rules will be created and your notification channels will be converted into an Alertmanager configuration. Previous data is preserved to enable backwards compatibility but new data is removed.
diff --git a/.github/tvarit/dashboard-deployment.py b/.github/tvarit/dashboard-deployment.py
new file mode 100644
index 0000000000..8dc1411db5
--- /dev/null
+++ b/.github/tvarit/dashboard-deployment.py
@@ -0,0 +1,127 @@
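+# Promotes Grafana dashboards from the test instance (test.tvarit.com) to the
+# matching org on a production instance (cloud.tvarit.com or maxion.tvarit.com),
+# rewriting org-specific values along the way.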
+import requests
+import json
+import subprocess
+def find_existing_folder(api_url, api_key, folder_name):
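+    """Return the id of the folder titled `folder_name`, or None if it is not found."""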
+ headers = {
+ "Authorization": f"Bearer {api_key}",
+ "Accept": "application/json",
+ "Content-Type": "application/json"
+ }
+
+ response = requests.get(f"{api_url}/folders", headers=headers)
+
+ if response.status_code == 200:
+ folders = response.json()
+ for folder in folders:
+ if folder.get("title") == folder_name:
+ return folder.get("id")
+
+ # If no matching folder is found, return None
+ return None
+ else:
+ print(f"Failed to fetch folders. Status Code: {response.status_code}")
+ return None
+
+def replace_in_dict(obj, search, replacement):
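+    """Recursively replace `search` with `replacement` in every string inside obj."""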
+ if isinstance(obj, dict):
+ for key in list(obj.keys()):
+ obj[key] = replace_in_dict(obj[key], search, replacement)
+ return obj
+ elif isinstance(obj, list):
+ return [replace_in_dict(item, search, replacement) for item in obj]
+ elif isinstance(obj, str):
+ return obj.replace(search, replacement)
+ else:
+ return obj
+
+print('Setting up variables')
+
+maxion_grafana_url = "https://maxion.tvarit.com/api"
+cloud_grafana_url = "https://cloud.tvarit.com/api"
+test_grafana_url = "https://test.tvarit.com/api"
+grafana_url = ""
+
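+# The secret is expected to be a JSON object keyed by environment ("Test"/"Prod"),
+# each mapping org names to their Grafana API key and other org-specific values.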
+aws_cli_command = "aws secretsmanager get-secret-value --secret-id /credentials/grafana-user/access-key --output text --query SecretString"
+
+try:
+ # Run the AWS CLI command and capture its output
+ result = subprocess.run(aws_cli_command, shell=True, text=True, capture_output=True, check=True)
+ secret_json = json.loads(result.stdout)
+
+ data = secret_json
+except subprocess.CalledProcessError as e:
+    # Without the secret there is nothing to deploy, so surface the error and stop.
+    print("AWS CLI command failed with error:")
+    print(e.stderr)
+    raise
+
+print('###################################Starting Deployment###################################')
+
+data_test = data.get("Test", {})
+
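+# Orgs hosted on cloud.tvarit.com are listed explicitly; every other org is deployed to maxion.tvarit.com.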
+for key in data_test.keys():
+    print(f'Deploying in {key}')
+ if key in ['Alcar', 'Gienanth', 'Procast', 'Voit', 'Doktas', 'ESW', 'Endurance', 'Foehl', 'Mahle', 'Mbusch']:
+ grafana_url = cloud_grafana_url
+ else:
+ grafana_url = maxion_grafana_url
+ org_data = data_test[key]
+ # org_data['api'] = {f'TEST_API_KEY_{key}'}
+ headers = {
+ "Authorization": f"Bearer {org_data['api']}"
+ }
+
+ data_prod = data.get("Prod", {}).get(key, {})
+ api = data_prod['api']
+ headers2 = {
+ "Authorization": f"Bearer {api}",
+ "Accept": "application/json",
+ "Content-Type": "application/json",
+ }
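+    # Only the two production dashboard folders are promoted from test to production.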
+ for folder in ['Production Dashboards PsQ', 'Production Dashboards PsE']:
+ source_folder = find_existing_folder(test_grafana_url, org_data['api'], folder)
+ destination_folder = find_existing_folder(grafana_url, api, folder)
+ if source_folder and destination_folder:
+ print(source_folder, destination_folder)
+ response = requests.get(f"{test_grafana_url}/search", params={"folderIds": [source_folder]}, headers=headers)
+ dashboards_response = response.json()
+ print(dashboards_response)
+ for dashboard in dashboards_response:
+ dashboard_uid = dashboard["uid"]
+ dashboard_title = dashboard["title"]
+
+                # TODO: add versioning checks before overwriting the production copy.
+                print(f"Deploying dashboard '{dashboard_title}'.")
+ # print(dashboard)
+ # Step 5: Retrieve Dashboard JSON
+ response = requests.get(f"{test_grafana_url}/dashboards/uid/{dashboard_uid}", headers=headers)
+ # print(response)
+
+ dashboard_json = response.json()
+
+                for field in org_data.keys():
+                    if field in data_prod:
+                        replace_in_dict(dashboard_json, org_data[field], data_prod[field])
+ # print("Dashboard JSON")
+ # print(dashboard_json)
+                dashboard = dashboard_json.get("dashboard", {})
+                # Drop the test-instance uid/id; with overwrite=True Grafana matches the existing dashboard by title in the destination folder.
+                dashboard.pop("uid", None)
+                dashboard.pop("id", None)
+                if 'meta' in dashboard_json:
+                    del dashboard_json['meta']
+ # print(dashboard)
+ dashboard_json["dashboard"] = dashboard
+ dashboard_json["overwrite"] = True
+ dashboard_json["folderId"] = destination_folder
+
+                print(f'Uploading to {grafana_url}')
+ response = requests.post(f"{grafana_url}/dashboards/db", headers=headers2, json=dashboard_json)
+ if response.status_code == 200:
+ print("Dashboard creation/updating successful!")
+ else:
+ print(f"Error {response.status_code}: {response.content.decode('utf-8')}")
+ else:
+ print(f'Could not find folder {folder} in org {key}')
+
+
+
diff --git a/.github/tvarit/deploy_to_production.sh b/.github/tvarit/deploy_to_production.sh
index b0ebd101b0..08c306b174 100755
--- a/.github/tvarit/deploy_to_production.sh
+++ b/.github/tvarit/deploy_to_production.sh
@@ -74,6 +74,14 @@ sed -i "s##${SMTP_USER}#g" grafana.ini
sed -i "s##${SMTP_PASSWORD}#g" grafana.ini
sed -i "s##Tvarit AI Platform#g" grafana.ini
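+# Enable Azure AD login only for the maxion production instance; all other instances keep it disabled.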
+if [ "${PREFIX}" == "maxion" ]; then
+sed -i "s##true#g" grafana.ini
+sed -i "s##${MAXION_CLIENT_ID}#g" grafana.ini
+sed -i "s##${MAXION_CLIENT_SECRET}#g" grafana.ini
+else
+sed -i "s##false#g" grafana.ini
+fi
+
cp cloudwatch.json.template cloudwatch.json
sed -i "s##${PREFIX}.tvarit.com#g" cloudwatch.json
diff --git a/.github/tvarit/deploy_to_staging.sh b/.github/tvarit/deploy_to_staging.sh
index 032c855d7a..9f9490a57d 100755
--- a/.github/tvarit/deploy_to_staging.sh
+++ b/.github/tvarit/deploy_to_staging.sh
@@ -95,6 +95,17 @@ sed -i "s##${SMTP_USER}#g" grafana.ini
sed -i "s##${SMTP_PASSWORD}#g" grafana.ini
sed -i "s##[BETA] Tvarit AI Platform#g" grafana.ini
+if [ "${PREFIX}" == "maxion" ]; then
+sed -i "s##true#g" grafana.ini
+sed -i "s##${MAXION_CLIENT_ID}#g" grafana.ini
+sed -i "s##${MAXION_CLIENT_SECRET}#g" grafana.ini
+else
+sed -i "s##false#g" grafana.ini
+fi
+
cp cloudwatch.json.template cloudwatch.json
sed -i "s##next-${PREFIX}.tvarit.com#g" cloudwatch.json
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index b79d174e9c..68bf6a4913 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -16,6 +16,8 @@ jobs:
SMTP_HOST: ${{ secrets.SMTP_HOST }}
SMTP_USER: ${{ secrets.SMTP_USER }}
SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
+      MAXION_CLIENT_ID: ${{ secrets.MAXION_CLIENT_ID }}
+      MAXION_CLIENT_SECRET: ${{ secrets.MAXION_CLIENT_SECRET }}
steps:
- uses: actions/checkout@v2
- run: |
@@ -34,6 +36,8 @@ jobs:
SMTP_HOST: ${{ secrets.SMTP_HOST }}
SMTP_USER: ${{ secrets.SMTP_USER }}
SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
+      MAXION_CLIENT_ID: ${{ secrets.MAXION_CLIENT_ID }}
+      MAXION_CLIENT_SECRET: ${{ secrets.MAXION_CLIENT_SECRET }}
steps:
- uses: actions/checkout@v2
- run: |
diff --git a/.github/workflows/test-to-prod-deploy.yml b/.github/workflows/test-to-prod-deploy.yml
new file mode 100644
index 0000000000..71c46ea3bb
--- /dev/null
+++ b/.github/workflows/test-to-prod-deploy.yml
@@ -0,0 +1,25 @@
+name: Deploy Dashboards
+
+on:
+ workflow_dispatch:
+
+jobs:
+ deploy_to_production:
+ runs-on: ubuntu-latest
+ environment: development
+ env:
+ AWS_DEFAULT_REGION: eu-central-1
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
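+      # The AWS credentials above let dashboard-deployment.py read the Grafana API keys from Secrets Manager.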
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+
+      - name: Run Deployment script
+        run: |
+          pip install requests
+          python .github/tvarit/dashboard-deployment.py
diff --git a/pkg/login/social/azuread_oauth.go b/pkg/login/social/azuread_oauth.go
index 9ce173b65f..3f000411db 100644
--- a/pkg/login/social/azuread_oauth.go
+++ b/pkg/login/social/azuread_oauth.go
@@ -69,8 +69,8 @@ func (s *SocialAzureAD) UserInfo(client *http.Client, token *oauth2.Token) (*Bas
return nil, errors.New("error getting user info: no email found in access token")
}
- role := extractRole(claims, s.autoAssignOrgRole)
- logger.Debug("AzureAD OAuth: extracted role", "email", email, "role", role)
+ /* role := extractRole(claims, s.autoAssignOrgRole)
+ logger.Debug("AzureAD OAuth: extracted role", "email", email, "role", role) */
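+	// AzureAD app roles are intentionally ignored here; org roles are managed in Grafana itself
+	// (skip_org_role_sync is enabled in grafana.ini).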
groups, err := extractGroups(client, claims, token)
if err != nil {
@@ -87,7 +87,7 @@ func (s *SocialAzureAD) UserInfo(client *http.Client, token *oauth2.Token) (*Bas
Name: claims.Name,
Email: email,
Login: email,
- Role: string(role),
+ Role: "", /* string(role), */
Groups: groups,
}, nil
}
@@ -117,7 +117,7 @@ func extractEmail(claims azureClaims) string {
return claims.Email
}
-
+/*
func extractRole(claims azureClaims, autoAssignRole string) models.RoleType {
if len(claims.Roles) == 0 {
return models.RoleType(autoAssignRole)
@@ -146,7 +146,7 @@ func hasRole(roles []string, role models.RoleType) bool {
}
return false
}
-
+*/
type getAzureGroupRequest struct {
SecurityEnabledOnly bool `json:"securityEnabledOnly"`
}