Add 5G validator test code
denverwilliams committed Nov 24, 2023
1 parent 8758bce commit f3b0b76
Showing 12 changed files with 257 additions and 33 deletions.
4 changes: 3 additions & 1 deletion sample-cnfs/sample_open5gs/cnf-testsuite.yml
@@ -3,7 +3,9 @@ helm_directory: open5gs
release_name: open5gs
allowlist_helm_chart_container_names: []
#optional 5gcore tag
core_label: app.kubernetes.io/name=amf
amf_label: app.kubernetes.io/name=amf
smf_label: app.kubernetes.io/name=smf
upf_label: app.kubernetes.io/name=upf
amf_pod_name: open5gs-amf-ngap
mmc: '999'
mnc: '70'
2 changes: 1 addition & 1 deletion sample-cnfs/sample_open5gs_no_auth/cnf-testsuite.yml
@@ -3,7 +3,7 @@ helm_directory: open5gs
release_name: open5gs
allowlist_helm_chart_container_names: []
#optional 5gcore tag
core_label: app.kubernetes.io/name=amf
amf_label: app.kubernetes.io/name=amf
amf_pod_name: open5gs-amf-ngap
mmc: '999'
mnc: '70'
6 changes: 3 additions & 3 deletions shard.lock
@@ -54,7 +54,7 @@ shards:

kubectl_client:
git: https://github.com/cnf-testsuite/kubectl_client.git
version: 1.0.4
version: 1.0.5

popcorn:
git: https://github.com/icyleaf/popcorn.git
@@ -66,15 +66,15 @@ shards:

readline:
git: https://github.com/crystal-lang/crystal-readline.git
version: 0.1.1+git.commit.add6679775d59c164e2db04f5116557180d04ad9
version: 0.1.1+git.commit.69ecf33d7cad5568d7d19333510cfd9d17cb1bbd

release_manager:
git: https://github.com/cnf-testsuite/release_manager.git
version: 0.1.0+git.commit.a1d7b3568d3112f737ab3ff4a7bae69a6b86970a

retriable:
git: https://github.com/sija/retriable.cr.git
version: 0.2.4
version: 0.2.5

sam:
git: https://github.com/vulk/sam.cr.git
2 changes: 1 addition & 1 deletion shard.yml
@@ -49,7 +49,7 @@ dependencies:
version: ~> 1.0.0
kubectl_client:
github: cnf-testsuite/kubectl_client
version: ~> 1.0.4
version: ~> 1.0.5
cluster_tools:
github: cnf-testsuite/cluster_tools
version: ~> 1.0.0
12 changes: 9 additions & 3 deletions src/tasks/utils/config.cr
@@ -35,7 +35,9 @@ module CNFManager
white_list_container_names: Array(String),
docker_insecure_registries: Array(String) | Nil,
#todo change this to an array of labels that capture all of 5g core nodes
core_label: String,
amf_label: String,
smf_label: String,
upf_label: String,
ric_label: String,
fiveG_core: NamedTuple(amf_pod_name: String,
mmc: String,
@@ -129,7 +131,9 @@ module CNFManager
apn: optional_key_as_string(config, "apn"),
emergency: core_emergency,
}
core = optional_key_as_string(config, "core_label")
core = optional_key_as_string(config, "amf_label")
smf = optional_key_as_string(config, "smf_label")
upf = optional_key_as_string(config, "upf_label")
ric = optional_key_as_string(config, "ric_label")
if helm_directory.empty?
working_chart_directory = "exported_chart"
@@ -208,7 +212,9 @@ module CNFManager
container_names: container_names,
white_list_container_names: white_list_container_names,
docker_insecure_registries: docker_insecure_registries,
core_label: core,
amf_label: core,
smf_label: smf,
upf_label: upf,
ric_label: ric,
fiveG_core: fiveG_core,
image_registry_fqdns: image_registry_fqdns,})
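The config.cr change above replaces the single core_label with separate amf_label, smf_label, and upf_label entries, each read with optional_key_as_string so a missing key simply becomes an empty string. A rough sketch of the label-related fields after parsing, using the values from sample_open5gs/cnf-testsuite.yml shown earlier (illustrative only, not testsuite code):

# Rough shape of the new label fields once the config is parsed; a missing
# optional key is returned as "" by optional_key_as_string.
labels = {
  amf_label: "app.kubernetes.io/name=amf",
  smf_label: "app.kubernetes.io/name=smf",
  upf_label: "app.kubernetes.io/name=upf",
  ric_label: "",
}
puts labels[:smf_label].split("=").last # => "smf"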
91 changes: 82 additions & 9 deletions src/tasks/utils/k8s_tshark.cr
@@ -32,14 +32,49 @@ module K8sTshark
# ClusterTools.exec_by_node_bg("tshark -ni any -a duration:120 -Y nas_5gs.mm.type_id -T json 2>&1 | tee #{tshark_log_name}", node)
# Log.info { "after exec by node bg" }
# resp = tshark_log_name
resp = log_of_tshark_by_node(command, node, duration="120")
resp = log_of_tshark_by_node(command, node, duration)
else
resp = "label key:#{label_key} value: #{label_value} not found"
end
Log.info { "resp #{resp}" }
resp
end

def self.log_of_tshark_by_label_bg(command, label_key, label_value, duration="120") : String
Log.info { "log_of_tshark_by_label command label_key label value: #{command} #{label_key} #{label_value}" }
all_pods = KubectlClient::Get.pods_by_nodes(KubectlClient::Get.schedulable_nodes_list)
pods = KubectlClient::Get.pods_by_label(all_pods, label_key, label_value)
first_labeled_pod = pods[0]?
Log.info { "first_labeled_pod: #{first_labeled_pod}" }
if first_labeled_pod && first_labeled_pod.dig?("metadata", "name")
Log.info { "first_labeled_pod #{first_labeled_pod} metadata name: #{first_labeled_pod.dig?("metadata", "name")}" }
pod_name = first_labeled_pod.dig("metadata", "name")
Log.info { "pod_name: #{pod_name}" }
nodes = KubectlClient::Get.nodes_by_pod(first_labeled_pod)
node = nodes.first
#create a unique name for the log
# rnd = Random.new
# name_id = rnd.next_int
# tshark_log_name = "/tmp/tshark-#{name_id}.json"
# Log.info { "tshark_log_name #{tshark_log_name}" }
#
# #tshark -ni any -Y nas_5gs.mm.type_id -T json 2>&1 | tee hi.log
# #command= -ni any -Y nas_5gs.mm.type_id -T json
# #todo check if tshark running already to keep from saturating network
# #todo play with reducing default duration
# ClusterTools.exec_by_node_bg("tshark #{command} -a duration:#{duration} 2>&1 | tee #{tshark_log_name}", node)
# ClusterTools.exec_by_node_bg("tshark -ni any -a duration:120 -Y nas_5gs.mm.type_id -T json 2>&1 | tee #{tshark_log_name}", node)
# Log.info { "after exec by node bg" }
# resp = tshark_log_name
resp = log_of_tshark_by_node_bg(command, node, duration)
else
resp = "label key:#{label_key} value: #{label_value} not found"
end
Log.info { "resp #{resp}" }
resp
end


def self.log_of_tshark_by_node(command, node, duration="120") : String
Log.info { "log_of_tshark_by_node: command #{command}" }
#create a unique name for the log
@@ -48,6 +83,22 @@
tshark_log_name = "/tmp/tshark-#{name_id}.json"
Log.info { "log_of_tshark_by_node tshark_log_name #{tshark_log_name}" }

#tshark -ni any -Y nas_5gs.mm.type_id -T json 2>&1 | tee hi.log
#command= -ni any -Y nas_5gs.mm.type_id -T json
#todo check if tshark running already to keep from saturating network
ClusterTools.exec_by_node("tshark #{command} -a duration:#{duration} 2>&1 | tee #{tshark_log_name}", node)
Log.info { "after exec by node bg" }
tshark_log_name
end

def self.log_of_tshark_by_node_bg(command, node, duration="120") : String
Log.info { "log_of_tshark_by_node: command #{command}" }
#create a unique name for the log
rnd = Random.new
name_id = rnd.next_int.abs
tshark_log_name = "/tmp/tshark-#{name_id}.json"
Log.info { "log_of_tshark_by_node tshark_log_name #{tshark_log_name}" }

#tshark -ni any -Y nas_5gs.mm.type_id -T json 2>&1 | tee hi.log
#command= -ni any -Y nas_5gs.mm.type_id -T json
#todo check if tshark running already to keep from saturating network
@@ -56,23 +107,45 @@
tshark_log_name
end

def self.regex_tshark_log(regex, tshark_log_name)

def self.regex_tshark_log_scan(regex, tshark_log_name)
Log.info { "regex_tshark_log regex tshark_log_name: #{regex} #{tshark_log_name}" }
regex_found : Bool | Nil
resp = File.read("#{tshark_log_name}")
Log.info { "tshark_log_name resp: #{resp}" }
if resp
Log.info { "resp: #{resp}" }
if resp =~ regex
regex_found = true
else
regex_found = false
end
ret = resp.scan(regex)
else
Log.info { "file empty" }
ret = nil
end
Log.info { "#{regex}: #{ret}" }
ret
end

def self.regex_tshark_log_match(regex, tshark_log_name)
Log.info { "regex_tshark_log regex tshark_log_name: #{regex} #{tshark_log_name}" }
resp = File.read("#{tshark_log_name}")
Log.info { "tshark_log_name resp: #{resp}" }
if resp
Log.info { "resp: #{resp}" }
ret = resp =~ regex
else
Log.info { "file empty" }
ret = nil
end
Log.info { "#{regex}: #{ret}" }
ret
end

def self.regex_tshark_log(regex, tshark_log_name)
Log.info { "regex_tshark_log regex tshark_log_name: #{regex} #{tshark_log_name}" }
regex_found : Bool | Nil
if regex_tshark_log_match(regex, tshark_log_name)
regex_found = true
else
regex_found = false
end
Log.info { "#{regex}: #{regex_found}" }
regex_found
end

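Taken together, the new K8sTshark helpers split capture from analysis: log_of_tshark_by_label and its _bg variant find a node through a pod label and run tshark there for a bounded duration, writing JSON to a /tmp log, while regex_tshark_log_scan returns every match, regex_tshark_log_match returns the first match index, and regex_tshark_log reduces that to a boolean. A minimal usage sketch, assuming an SMF pod labeled app.kubernetes.io/name=smf exists and ClusterTools is installed on the node:

# Sketch: capture PFCP heartbeat/association messages near the SMF pod for
# two minutes, then count matches in the resulting JSON log.
command = "-ni any -Y 'pfcp.msg_type == 1 or pfcp.msg_type == 2' -T json"
log_name = K8sTshark.log_of_tshark_by_label(command, "app.kubernetes.io/name", "smf", "120")
if log_name && !(log_name =~ /not found/)
  matches = K8sTshark.regex_tshark_log_scan(/"pfcp\.msg_type": "(1|2)"/, log_name)
  if matches
    puts "heartbeat messages captured: #{matches.size}"
  end
end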
6 changes: 3 additions & 3 deletions src/tasks/utils/srsran.cr
@@ -14,7 +14,7 @@ module SRSRAN

def self.install(config)
Log.info {"Installing srsran"}
core = config.cnf_config[:core_label]?
core = config.cnf_config[:amf_label]?
Log.info { "core: #{core}" }
#todo use sane defaults (i.e. search for amf, upf, etc in pod names) if no 5gcore labels are present
amf_pod_name = config.cnf_config[:fiveG_core][:amf_pod_name]?
@@ -34,8 +34,8 @@
emergency = config.cnf_config[:fiveG_core][:emergency]?
core_key : String = ""
core_value : String = ""
core_key = config.cnf_config[:core_label].split("=").first if core
core_value = config.cnf_config[:core_label].split("=").last if core
core_key = config.cnf_config[:amf_label].split("=").first if core
core_value = config.cnf_config[:amf_label].split("=").last if core
if core
all_pods = KubectlClient::Get.pods_by_nodes(KubectlClient::Get.schedulable_nodes_list)
ueran_pods = KubectlClient::Get.pods_by_label(all_pods, "app.kubernetes.io/name", "ueransim-gnb")
4 changes: 3 additions & 1 deletion src/tasks/utils/task.cr
@@ -84,7 +84,9 @@ module CNFManager
container_names: [{"name" => "", "rolling_update_test_tag" => ""}],
white_list_container_names: [""],
docker_insecure_registries: [] of String,
core_label: "",
amf_label: "",
smf_label: "",
upf_label: "",
ric_label: "",
fiveG_core: {amf_pod_name: "",
mmc: "",
6 changes: 3 additions & 3 deletions src/tasks/utils/ueransim.cr
@@ -20,7 +20,7 @@ module UERANSIM

def self.install(config)
Log.info {"Installing ueransim with 5g config"}
core = config.cnf_config[:core_label]?
core = config.cnf_config[:amf_label]?
Log.info { "core: #{core}" }
#todo use sane defaults (i.e. search for amf, upf, etc in pod names) if no 5gcore labels are present
amf_pod_name = config.cnf_config[:fiveG_core][:amf_pod_name]?
@@ -40,8 +40,8 @@
emergency = config.cnf_config[:fiveG_core][:emergency]?
core_key : String = ""
core_value : String = ""
core_key = config.cnf_config[:core_label].split("=").first if core
core_value = config.cnf_config[:core_label].split("=").last if core
core_key = config.cnf_config[:amf_label].split("=").first if core
core_value = config.cnf_config[:amf_label].split("=").last if core
if core
all_pods = KubectlClient::Get.pods_by_nodes(KubectlClient::Get.schedulable_nodes_list)
ueran_pods = KubectlClient::Get.pods_by_label(all_pods, "app.kubernetes.io/name", "ueransim-gnb")
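Both install helpers above (srsran.cr and ueransim.cr) now read the AMF label from amf_label instead of core_label but keep the same lookup pattern: split the "key=value" label and use it to find the AMF pod among the pods on schedulable nodes. A simplified sketch of that lookup, with a hard-coded label standing in for config.cnf_config[:amf_label] (the KubectlClient calls are the ones already used in these files):

# Sketch of the AMF pod lookup both install helpers perform.
require "kubectl_client" # assumed require path for the kubectl_client shard

amf_label = "app.kubernetes.io/name=amf"
core_key = amf_label.split("=").first
core_value = amf_label.split("=").last
all_pods = KubectlClient::Get.pods_by_nodes(KubectlClient::Get.schedulable_nodes_list)
amf_pods = KubectlClient::Get.pods_by_label(all_pods, core_key, core_value)
puts "amf pods found: #{amf_pods.size}"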
115 changes: 115 additions & 0 deletions src/tasks/workload/5g_validator.cr
@@ -0,0 +1,115 @@
# coding: utf-8
require "sam"
require "file_utils"
require "colorize"
require "totem"
require "../utils/utils.cr"

desc "Test if a 5G core has SMF/UPF heartbeat"
task "smf_upf_heartbeat" do |t, args|
CNFManager::Task.task_runner(args) do |args, config|
task_start_time = Time.utc
testsuite_task = "suci_enabled"
Log.for(testsuite_task).info { "Starting test" }

Log.debug { "cnf_config: #{config}" }
heartbeat_found : Bool | Nil
smf = config.cnf_config[:smf_label]?
upf = config.cnf_config[:upf_label]?
Log.info { "smf: #{smf}" }
Log.info { "upf: #{upf}" }
smf_key : String = ""
smf_value : String = ""
smf_key = config.cnf_config[:smf_label].split("=").first if smf
smf_value = config.cnf_config[:smf_label].split("=").last if smf

if smf && upf

args.named["pod_labels"]="#{smf},#{upf}"
command = "-ni any -Y 'pfcp.msg_type == 1 or pfcp.msg_type == 2' -T json"
#todo in pod_network_latency do labels = args.named["pod_labels"].as(String).split(",")
# todo t.invoke("pod_network_latency", args)

#Baseline
tshark_log_name = K8sTshark.log_of_tshark_by_label(command, smf_key, smf_value, "120")
if tshark_log_name &&
!tshark_log_name.empty? &&
(tshark_log_name =~ /not found/) == nil
scan = K8sTshark.regex_tshark_log_scan(/"pfcp\.msg_type": "(1|2)"/, tshark_log_name)
if scan
baseline_count = scan.size
Log.info { "Baseline matches: #{baseline_count}" }
end
end

#Chaos Matches
sync_channel = Channel(Nil).new
spawn do
t.invoke("pod_network_latency", args)
sync_channel.send(nil)
end
Log.info { "Main thread continuing" }

tshark_log_name = K8sTshark.log_of_tshark_by_label(command, smf_key, smf_value, "120")
if tshark_log_name &&
!tshark_log_name.empty? &&
(tshark_log_name =~ /not found/) == nil

#todo put in prereq
#todo call test suite chaos tasks
#hi = t.invoke("node_drain")
#todo hi should have a string e.g. "PASSED: nodes are drained etc"

# UERANSIM.install(config) #todo remove
Log.info { "TShark Log File: #{tshark_log_name}" }
#TODO 5g RAN (only) mobile traffic check ????
# use suci encryption but don't use a null encryption key
scan = K8sTshark.regex_tshark_log_scan(/"pfcp\.msg_type": "(1|2)"/, tshark_log_name)
if scan
chaos_count = scan.size
Log.info { "Chaos Matches: #{chaos_count}" }
end
# !K8sTshark.regex_tshark_log(/"nas_5gs.mm.suci.scheme_id": "0"/, tshark_log_name) &&
# !K8sTshark.regex_tshark_log(/"nas_5gs.mm.suci.pki": "0"/, tshark_log_name)
heartbeat_found = true
else
heartbeat_found = false
end
sync_channel.receive

Log.info { "Chaos Matches: #{chaos_count}" }
Log.info { "Baseline matches: #{baseline_count}" }

if chaos_count && baseline_count
difference = (chaos_count.to_i - baseline_count.to_i).abs
if difference <= 5
puts "Heartbeat message counts are within 5 of each other. Passing"
heartbeat_found = true
else
puts "Heartbeat message counts differ by more than 5. Failing"
heartbeat_found = false
end
end

#todo delete log file
else
heartbeat_found = false
puts "no 5G labels set for SMF and UPF".colorize(:red)
end

# else
# suci_found = false
# puts "You must set the core label for you AMF node".colorize(:red)
# end


if heartbeat_found
resp = upsert_passed_task(testsuite_task, "✔️ PASSED: SMF/UPF heartbeat is present", task_start_time)
else
resp = upsert_failed_task(testsuite_task, "✖️ FAILED: SMF/UPF heartbeat is not present", task_start_time)
end
resp
ensure
Helm.delete("ueransim")
ClusterTools.uninstall
ClusterTools.install
end
end
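The new smf_upf_heartbeat task boils down to comparing two PFCP message counts: one from a baseline capture and one taken while pod_network_latency chaos runs against the SMF/UPF pods, passing when the counts stay within 5 of each other. A minimal sketch of that comparison pulled out as a standalone helper (heartbeat_within_tolerance? is a hypothetical name, not part of the testsuite):

# Hypothetical helper: true when the chaos-period heartbeat count stays
# within `tolerance` messages of the baseline count.
def heartbeat_within_tolerance?(baseline_count : Int32, chaos_count : Int32, tolerance : Int32 = 5) : Bool
  (chaos_count - baseline_count).abs <= tolerance
end

puts heartbeat_within_tolerance?(24, 21) # => true
puts heartbeat_within_tolerance?(24, 3)  # => false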
6 changes: 3 additions & 3 deletions src/tasks/workload/ran.cr
@@ -14,12 +14,12 @@ task "suci_enabled" do |_, args|

Log.debug { "cnf_config: #{config}" }
suci_found : Bool | Nil
core = config.cnf_config[:core_label]?
core = config.cnf_config[:amf_label]?
Log.info { "core: #{core}" }
core_key : String = ""
core_value : String = ""
core_key = config.cnf_config[:core_label].split("=").first if core
core_value = config.cnf_config[:core_label].split("=").last if core
core_key = config.cnf_config[:amf_label].split("=").first if core
core_value = config.cnf_config[:amf_label].split("=").last if core
if core

command = "-ni any -Y nas_5gs.mm.type_id -T json"