Skip to content

Commit

Permalink
Add scripts for using cygprofile to repository
Browse files Browse the repository at this point in the history
These scripts are used to create a link order file for Linux-based
builds (Linux, Android, ChromeOS). This commit simply moves them from a
Google private web page where they have lived for the last couple of years.

The scripts are:
mergetraces.py - merge multiple cygprofile traces into a single trace
symbolise.py - convert traces into an order file with symbols
patch_orderfile.py - complete the order file with symbols from Chrome.

I will separately add a page on creating an order file to the Chromium
web site.

Review URL: https://chromiumcodereview.appspot.com/16151006

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@203143 0039d316-1c4b-4281-b951-d872f2087c98
  • Loading branch information
[email protected] committed May 30, 2013
1 parent adbed22 commit da0ac0f
Show file tree
Hide file tree
Showing 3 changed files with 554 additions and 0 deletions.
186 changes: 186 additions & 0 deletions tools/cygprofile/mergetraces.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,186 @@
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Use: ../mergetraces.py `ls cyglog.* -Sr` > merged_cyglog

"""Merge multiple log files from different processes into a single log.

Given two log files of execution traces, merge the traces into a single trace.
Merging will use timestamps (i.e. the first two columns of logged calls) to
create a single log that is an ordered trace of calls by both processes.
"""

import optparse
import os
import string
import subprocess
import sys

def ParseLogLines(lines):
  """Parse log file lines.

  Args:
    lines: lines from log file produced by profiled run

    Below is an example of a small log file:
    5086e000-52e92000 r-xp 00000000 b3:02 51276 libchromeview.so
    secs   msecs      pid:threadid    func
    START
    1314897086 795828 3587:1074648168 0x509e105c
    1314897086 795874 3587:1074648168 0x509e0eb4
    1314897086 796326 3587:1074648168 0x509e0e3c
    1314897086 796552 3587:1074648168 0x509e07bc
    END

  Returns:
    tuple consisting of 1) an ordered list of the logged calls, as an array of
    fields, 2) the virtual start address of the library, used to compute the
    offset of the symbol in the library and 3) the virtual end address
  """
  # The first line is the /proc/<pid>/maps entry for the library; the address
  # range sits before the first space, separated by a dash.
  header = lines[0]
  dash_index = header.find ('-')
  space_index = header.find (' ')
  vm_start = int (header[:dash_index], 16)
  vm_end = int (header[dash_index+1:space_index], 16)
  # Skip the two header lines; each remaining line is whitespace-split into
  # its fields (secs, msecs, pid:threadid, func).
  call_lines = [line.split() for line in lines[2:]]

  return (call_lines, vm_start, vm_end)

def HasDuplicates(calls):
  """Sanity check to make sure that calls are only logged once.

  Args:
    calls: list of calls logged, each an array of fields whose fourth entry
        is the callee address

  Returns:
    boolean indicating if calls has duplicate calls
  """
  # Bug fix: the original `return true` raised a NameError (Python's literal
  # is `True`), so a duplicate crashed the script instead of being reported.
  # A set gives O(1) membership tests instead of scanning a list.
  seen = set()
  for call in calls:
    if call[3] in seen:
      return True
    seen.add(call[3])
  return False

def CheckTimestamps(calls):
  """Prints warning to stderr if the call timestamps are not in order.

  Args:
    calls: list of calls logged, each an array of fields whose first two
        entries are the seconds and microseconds of the call
  """
  last_timestamp_secs = -1
  last_timestamp_ms = -1
  for call in calls:
    timestamp_secs = int (call[0])
    timestamp_ms = int (call[1])
    # Fold the two columns into a single comparable value.
    current = (timestamp_secs * 1000000) + timestamp_ms
    previous = (last_timestamp_secs * 1000000) + last_timestamp_ms
    if current < previous:
      sys.stderr.write("WARNING: last_timestamp: " + str(last_timestamp_secs)
                       + " " + str(last_timestamp_ms) + " timestamp: "
                       + str(timestamp_secs) + " " + str(timestamp_ms) + "\n")
    last_timestamp_secs = timestamp_secs
    last_timestamp_ms = timestamp_ms

def Convert (call_lines, startAddr, endAddr):
  """Converts the call addresses to static offsets and removes invalid calls.

  Removes profiled calls not in shared library using start and end virtual
  addresses, converts strings to integer values, converts virtual addresses to
  addresses in the shared library.  Only the first occurrence of each callee
  is kept.

  Args:
    call_lines: list of calls as arrays of string fields
        (sec, msec, pid:tid, callee)
    startAddr: virtual start address of the library
    endAddr: virtual end address of the library

  Returns:
    list of calls as tuples (sec, msec, pid:tid, callee)
  """
  converted_calls = []
  # Perf fix: membership was tested against a list, making the loop O(n^2)
  # over large traces; a set keeps lookups O(1) with identical output.
  seen_addresses = set()
  for fields in call_lines:
    secs = int (fields[0])
    msecs = int (fields[1])
    callee = int (fields[3], 16)
    if (callee >= startAddr and callee < endAddr
        and callee not in seen_addresses):
      converted_calls.append((secs, msecs, fields[2], (callee - startAddr)))
      seen_addresses.add(callee)
  return converted_calls

def Timestamp(trace_entry):
  """Returns the entry's (secs, msecs) fields folded into one sortable int."""
  secs, msecs = trace_entry[0], trace_entry[1]
  return 1000000 * int(secs) + int(msecs)

def AddTrace (tracemap, trace):
  """Adds a trace to the tracemap.

  Adds entries in the trace to the tracemap.  All new calls will be added to
  the tracemap.  If a call already exists in the tracemap it is replaced only
  when it happened sooner in the new trace.

  Args:
    tracemap: dict mapping callee -> earliest trace entry seen for that callee
    trace: list of trace entries (sec, msec, pid:tid, callee)
  """
  for trace_entry in trace:
    callee = trace_entry[3]
    if callee not in tracemap:
      tracemap[callee] = trace_entry
    elif Timestamp(trace_entry) < Timestamp(tracemap[callee]):
      tracemap[callee] = trace_entry

def main():
"""Merge two traces for code in specified library and write to stdout.
Merges the two traces and coverts the virtual addresses to the offsets in the
library. First line of merged trace has dummy virtual address of 0-ffffffff
so that symbolizing the addresses uses the addresses in the log, since the
addresses have already been converted to static offsets.
"""
parser = optparse.OptionParser('usage: %prog trace1 ... traceN')
(_, args) = parser.parse_args()
if len(args) <= 1:
parser.error('expected at least the following args: trace1 trace2')

step = 0
tracemap = dict()
for trace_file in args:
step += 1
sys.stderr.write(" " + str(step) + "/" + str(len(args)) +
": " + trace_file + ":\n")

trace_lines = map(string.rstrip, open(trace_file).readlines())
(trace_calls, trace_start, trace_end) = ParseLogLines(trace_lines)
CheckTimestamps(trace_calls)
sys.stderr.write("Len: " + str(len(trace_calls)) +
". Start: " + hex(trace_start) +
", end: " + hex(trace_end) + '\n')

trace_calls = Convert(trace_calls, trace_start, trace_end)
sys.stderr.write("Converted len: " + str(len(trace_calls)) + "\n")

AddTrace(tracemap, trace_calls)
sys.stderr.write("Merged len: " + str(len(tracemap)) + "\n")

# Extract the resulting trace from the tracemap
merged_trace = []
for call in tracemap:
merged_trace.append(tracemap[call])
merged_trace.sort(key=Timestamp)

print "0-ffffffff r-xp 00000000 xx:00 00000 ./"
print "secs\tmsecs\tpid:threadid\tfunc"
for call in merged_trace:
print (str(call[0]) + "\t" + str(call[1]) + "\t" + call[2] + "\t" +
hex(call[3]))

if __name__ == '__main__':
main()
117 changes: 117 additions & 0 deletions tools/cygprofile/patch_orderfile.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Patch an order file using symbols from an uninstrumented shared library.

Usage: patch_orderfile.py <orderfile> <uninstrumented_shlib> > patched_file

Reads the order file named by the first argument and an uninstrumented
shared library named by the second, runs "nm" over the library, and prints
one ".text.<symbol>" line per resolved symbol to stdout (progress and
statistics go to stderr).
"""

import commands
import os
import sys

orderfile = sys.argv[1]
uninstrumented_shlib = sys.argv[2]

# Run nm over the uninstrumented library and keep only local/weak/global text
# symbols; -n sorts by address, -S includes sizes.
nmlines_uninstrumented = commands.getoutput ('nm -S -n ' +
    uninstrumented_shlib + ' | egrep "( t )|( W )|( T )"').split('\n')

# Keep only well-formed lines: address, size, type, name.
nmlines = [nmline for nmline in nmlines_uninstrumented
           if len(nmline.split()) == 4]

# Map addresses to list of functions at that address.  There are multiple
# functions at an address because of aliasing.
uniqueAddrs = []
addressMap = {}
nm_index = 0
while nm_index < len(nmlines):
  fields = nmlines[nm_index].split()
  if len(fields) != 4:
    nm_index = nm_index + 1
    continue
  nm_int = int (fields[0], 16)
  size = int (fields[1], 16)
  fnames = [fields[3]]
  nm_index = nm_index + 1
  # Collect aliases: consecutive nm lines sharing the same address.
  while nm_index < len(nmlines):
    next_fields = nmlines[nm_index].split()
    if int (next_fields[0], 16) != nm_int:
      break
    fnames.append(next_fields[3])
    nm_index = nm_index + 1
  addressMap[nm_int] = fnames
  uniqueAddrs.append((nm_int, size))

def binary_search (addr, start, end, unique_addrs=None, address_map=None):
  """Finds the function(s) containing an address by binary search.

  Args:
    addr: address (offset in the library) to look up
    start: inclusive lower index of the search range in unique_addrs
    end: exclusive upper index of the search range in unique_addrs
    unique_addrs: sorted list of (address, size) tuples; defaults to the
        module-level uniqueAddrs built from nm output
    address_map: dict mapping address -> list of function names (aliases);
        defaults to the module-level addressMap

  Returns:
    tuple of (list of function names at the matching address, symbol size)

  Raises:
    LookupError: if addr does not fall inside any known symbol.
  """
  if unique_addrs is None:
    unique_addrs = uniqueAddrs
  if address_map is None:
    address_map = addressMap
  if start >= end or start == end - 1:
    (nm_addr, size) = unique_addrs[start]
    if not (addr >= nm_addr and addr < nm_addr + size):
      sys.stderr.write ("ERROR: did not find function in binary: addr: " +
          hex(addr) + " nm_addr: " + str(nm_addr) + " start: " + str(start) +
          " end: " + str(end) + "\n")
      # Bug fix: the original raised the undefined name `Error` (a NameError).
      raise LookupError("error")
    return (address_map[nm_addr], size)
  else:
    # `//` keeps integer division under both Python 2 and 3.
    halfway = start + ((end - start) // 2)
    (nm_addr, size) = unique_addrs[halfway]
    if (addr >= nm_addr and addr < nm_addr + size):
      return (address_map[nm_addr], size)
    elif (addr < nm_addr):
      return binary_search (addr, start, halfway, unique_addrs, address_map)
    elif (addr >= nm_addr + size):
      return binary_search (addr, halfway, end, unique_addrs, address_map)
    else:
      # Bug fix: the original raised a bare string, which is not a valid
      # exception.  This branch is logically unreachable.
      raise AssertionError("ERROR: did not expect this case")

# Read the order file: each non-blank line names a profiled function; strip
# the ".text." prefix and any ".clone." suffix to get the bare symbol name.
profiled_list = []
with open (orderfile) as f:
  for line in f:
    if (line.strip() == ''):
      continue
    functionName = line.replace('.text.', '').split('.clone.')[0].strip()
    profiled_list.append (functionName)

# Symbol names are not unique.  Since the order file uses symbol names, the
# patched order file pulls in all symbols with the same name.  Multiple
# function addresses for the same function name may also be due to ".clone"
# symbols, since the substring is stripped.
functions = []
functionAddressMap = {}
for line in nmlines:
  fields = line.split()
  # Bug fix: the original used bare `except:` clauses, which would also hide
  # unrelated errors; narrow to the index fallback actually intended.
  try:
    functionName = fields[3]
  except IndexError:
    functionName = fields[2]
  functionName = functionName.split('.clone.')[0]
  functionAddress = int (fields[0], 16)
  functionAddressMap.setdefault(functionName, []).append(functionAddress)
  functions.append(functionName)

sys.stderr.write ("profiled list size: " + str(len(profiled_list)) + "\n")
addresses = []
symbols_found = 0
for function in profiled_list:
try:
addrs = functionAddressMap[function]
symbols_found = symbols_found + 1
except:
addrs = []
# sys.stderr.write ("WARNING: could not find symbol " + function + "\n")
for addr in addrs:
if not (addr in addresses):
addresses.append(addr)
sys.stderr.write ("symbols found: " + str(symbols_found) + "\n")

sys.stderr.write ("number of addresses: " + str(len(addresses)) + "\n")
total_size = 0
for addr in addresses:
# if (count % 500 == 0):
# print "current count: " + str(count)
(functions, size) = binary_search (addr, 0, len(uniqueAddrs))
total_size = total_size + size
for function in functions:
print ".text." + function
print ""
sys.stderr.write ("total_size: " + str(total_size) + "\n")
Loading

0 comments on commit da0ac0f

Please sign in to comment.