forked from rpp0/lora-phy-fingerprinting
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmapping.py
90 lines (78 loc) · 2.29 KB
/
mapping.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import numpy as np
# Mapping from device ID to vendor ID
# Vendor codes: 1 = RN24, 2 = SX, 3 = RF96
HARDCODED_VENDOR_DICT = { # TODO: Make this non-hardcoded in a distant future
    # Devices 1-22 default to vendor 1 (RN24); the overrides below
    # reassign devices 4-6 to their actual vendors.
    **{device_id: 1 for device_id in range(1, 23)},
    4: 2,  # SX
    5: 3,  # RF96
    6: 3,  # RF96
}
# The Mapping class provides a mapping between a LoRa ID from the dataset, and one-hot label vector used in Tensorflow
class Mapping():
    """Bidirectional mapping between LoRa device IDs and contiguous
    class indices (map IDs) / one-hot label vectors for Tensorflow.

    Map IDs are assigned in first-seen order, starting at 0.
    """

    def __init__(self, lora_ids, exclude_classes=None, name="mapping"):
        """Build the mapping from an iterable of LoRa device IDs.

        lora_ids        -- iterable of LoRa device IDs to register
        exclude_classes -- IDs to skip; None means exclude nothing.
                           (None default avoids the mutable-default pitfall.)
        name            -- human-readable label for this mapping
        """
        self.name = name
        self._dict = {}   # lora_id -> map_id
        self._lid = 0     # next map_id to hand out
        self.size = 0     # number of registered classes (== len(self._dict))
        # Fill mapping
        self.update(lora_ids, [] if exclude_classes is None else exclude_classes)

    def add(self, lora_id):
        """Register lora_id if unseen, assigning it the next map ID."""
        if lora_id not in self._dict:
            self._dict[lora_id] = self._lid
            self._lid += 1
            self.size += 1

    def update(self, lora_ids, exclude_classes):
        """Register every ID in lora_ids that is not in exclude_classes."""
        for lora_id in lora_ids:
            if lora_id not in exclude_classes:
                self.add(lora_id)

    def keys(self):
        """Return a view of the registered LoRa IDs."""
        return self._dict.keys()

    def display(self):
        """Print the LoRa ID -> one-hot vector / map ID table."""
        if self._dict:  # truthiness instead of len(...) > 0
            print("[+] Mapping from LoRa to TF is:")
            for lora_id in self._dict.keys():
                print("\t LoRa " + str(lora_id) + " -> " + str(self.lora_id_to_oh(lora_id)) + " (" + str(self.lora_to_map_id(lora_id)) + ")")
        else:
            print("[-] Warning: no mapping created yet from LoRa ID to Mapping ID / one hot vector.")

    def lora_to_map_id(self, lora_id):
        """Return the map ID for lora_id, or None if it is not registered."""
        # dict.get replaces the original try/except KeyError, same contract.
        return self._dict.get(lora_id)

    def map_to_lora_id(self, map_id):
        """Return the LoRa ID assigned to map_id, or None if not found."""
        for lora_id, mid in self._dict.items():
            if mid == map_id:
                return lora_id
        return None

    def oh_to_lora_id(self, oh):
        """Return the LoRa ID encoded by one-hot vector oh (via argmax)."""
        map_id = np.argmax(oh)
        return self.map_to_lora_id(map_id)

    def lora_id_to_oh(self, lora_id):
        """Return the one-hot label vector for lora_id.

        Unknown IDs yield an all-zero vector of length self.size.
        """
        map_id = self.lora_to_map_id(lora_id)
        oh = [0] * self.size
        if map_id is not None:
            oh[map_id] = 1
        return oh

    def lora_id_to_vendor_id(self, lora_id):
        """Return the vendor ID for lora_id.

        Raises KeyError if lora_id is absent from HARDCODED_VENDOR_DICT.
        """
        # Reading a module-level name needs no `global` declaration.
        return HARDCODED_VENDOR_DICT[lora_id]