-
Notifications
You must be signed in to change notification settings - Fork 7
/
Copy pathhackrf_data.py
128 lines (117 loc) · 4.3 KB
/
hackrf_data.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import sys
import numpy as np
from scipy.stats import norm
from numpy import linalg as la
import tensorflow as tf
import tflearn
from matplotlib.ticker import FormatStrFormatter
# Sweep/FFT geometry for hackrf_sweep-style CSV output.
bw=40e6  # total swept bandwidth (Hz)
fres=100e3  # frequency resolution per bin (Hz)
linebw=5e6  # bandwidth covered by a single CSV line (Hz)
linesperchunk=1000  # sweeps processed per pandas read_csv chunk
flines= linesperchunk*4  # total sweeps to ingest before stopping
nbins=int(bw/fres)  # frequency bins per full sweep (400)
skiprows = int(bw/linebw)  # CSV lines that make up one full sweep (8)
patchtype = "random"  # training-patch strategy: "random" or "siglist"
# Known signal bands as {label: (start_bin, end_bin)} column indices.
# NOTE(review): these bin indices go far beyond nbins=400, so with the
# current bw/fres settings they would fall outside farr's columns — confirm
# the intended bw/fres for "siglist" mode before enabling it.
siglist={0:(800,1070),
1:(1090,1155),
2:(1170,1400),
3:(1660,1720),
4:(1960,2080),
5:(2125,2175),
6:(2200,2275),
7:(3880,3960),
8:(4220,4270),
#9:(4750,4875),
9:(6400,6600),
#11:(6650,6800),
10:(7900,8000),
#13:(8000,8100),
#14:(8100,8200),
11:(9200,9600),
}
# Alternative single-band siglist kept for quick experiments (disabled
# no-op string literal).
'''
siglist={0:(9200,9600),
}
'''
def get_patches(shape, psize, count=100):
    """Draw `count` random top-left origins for patches of size `psize`
    inside a 2-D array of shape `shape`.

    Returns (xpts, ypts): two int arrays of length `count` with origins in
    [0, shape[d] - psize[d]] for each dimension.
    """
    # +1 so the last valid origin (shape - psize) can actually be drawn,
    # and so a patch exactly the size of the array does not crash with
    # randint(0) raising ValueError (original off-by-one).
    xpts = np.random.randint(shape[0] - psize[0] + 1, size=count)
    ypts = np.random.randint(shape[1] - psize[1] + 1, size=count)
    return (xpts, ypts)
def lnorm(X_train):
    """L2-normalise each row X_train[i, j, :] in place; zero rows stay zero.

    Vectorised replacement for the original per-row Python double loop —
    one broadcasted division instead of O(i*j) norm calls. The array is
    modified in place and also returned, as before.
    """
    print("Pad:", X_train.shape)
    # Norm of every (i, j) row at once.
    norms = la.norm(X_train, 2, axis=2)
    # A zero row gives 0/0 = nan; nan_to_num maps it back to 0, matching
    # the original per-row behaviour. Suppress the expected warnings.
    with np.errstate(divide="ignore", invalid="ignore"):
        X_train[...] = np.nan_to_num(X_train / norms[:, :, np.newaxis])
    return X_train
def gendata(infile):
    """Load a hackrf_sweep CSV and turn it into training patches + labels.

    Each full sweep spans `skiprows` CSV lines; the lines of one sweep are
    ordered by start frequency (column 2) and columns 6.. hold the per-bin
    power readings. Up to `flines` sweeps are read into a (sweeps, nbins)
    power matrix, which is cut into examples according to the module-level
    `patchtype` ("random" or "siglist").

    Returns (train_data, train_labels, nmin, nmax, train_data_org) where
    train_data is z-score normalised and train_data_org is the raw copy.
    Implicitly returns None for an unrecognised `patchtype` (as before).
    """
    ncols = int(linebw / fres)  # power bins contributed by one CSV line
    rows = []
    cntr = 0
    for chunk in pd.read_csv(infile, header=None, chunksize=linesperchunk * skiprows):
        for i in range(linesperchunk):
            # Re-order the lines of one sweep by start frequency (column 2).
            sweep = chunk[(skiprows * i):(skiprows * (i + 1))].sort_values(by=2)
            # DataFrame.as_matrix() was removed from pandas; select the
            # power columns by label and convert with to_numpy() instead.
            dta = sweep[list(range(6, ncols + 6))].to_numpy()
            rows.append(dta.flatten()[:nbins])
        cntr = cntr + linesperchunk
        if cntr >= flines:
            break
        print(cntr)
    # Stack once at the end: the original np.vstack inside the loop grew
    # farr quadratically and needed a dummy first row + np.delete.
    farr = np.vstack(rows) if rows else np.zeros((0, nbins))

    if patchtype == "random":
        tlen = 10       # time extent (sweeps) of each patch
        freqlen = 399   # only constrains where origins may fall, see NOTE
        patchcnt = 8000
        pxorg, _ = get_patches(farr.shape, (tlen, freqlen), patchcnt)
        # NOTE(review): the frequency origins are discarded and every patch
        # keeps the full frequency axis (original overwrote pyorg with
        # zeros) — confirm this is intended.
        patches = np.array([farr[x:x + tlen, :] for x in pxorg])
        print(patches.shape)
        train_data = patches
        train_data_org = np.copy(train_data)
        nmin = np.min(train_data)
        nmax = np.max(train_data)
        # Z-score normalisation over the whole patch set.
        train_data = (train_data - np.mean(train_data)) / np.std(train_data)
        train_labels = np.zeros((train_data.shape[0], 1))  # unlabeled
        return train_data, train_labels, nmin, nmax, train_data_org
    elif patchtype == "siglist":
        # Window width = widest signal band; every example is padded to it.
        nsamples = max(hi - lo for lo, hi in siglist.values())
        tsamples = 10  # sweeps per training example
        minval = np.min(farr)
        data_parts = []
        label_parts = []
        for key, (lo, hi) in siglist.items():
            dta = farr[:, lo:hi]
            # Center the band inside the fixed-width window, padding with
            # the global minimum power.
            res = np.zeros((farr.shape[0], nsamples)) + minval
            sigbw = hi - lo
            shift = sigbw // 2   # // : slice indices must be ints on py3
            mid = nsamples // 2
            res[:, (mid - shift):(mid + sigbw - shift)] = dta
            # Trim so the sweep count divides evenly into examples.
            maxidx = (dta.shape[0] // tsamples) * tsamples
            res = res[:maxidx]
            train_cnt = res.shape[0] // tsamples  # // : reshape needs int
            if train_cnt > 1:
                data_parts.append(np.reshape(res, (train_cnt, tsamples, nsamples)))
                onehot = np.zeros((train_cnt, len(siglist)))
                onehot[:, key] = 1
                label_parts.append(onehot)
            print("Training data: Generation done for:", key)
        if data_parts:
            train_data = np.vstack(data_parts)
            train_labels = np.vstack(label_parts)
        else:
            train_data = np.zeros((0, tsamples, nsamples))
            train_labels = np.zeros((0, len(siglist)))
        train_data_org = np.copy(train_data)
        nmin = np.min(train_data)
        nmax = np.max(train_data)
        train_data = (train_data - np.mean(train_data)) / np.std(train_data)
        print("Min, max:", np.min(train_data), np.max(train_data))
        return train_data, train_labels, nmin, nmax, train_data_org