Skip to content

Commit c5404a7

Browse files
authored
Merge pull request #92 from jaredmauch/main
Several medium sized improvements
2 parents be2c3ea + 950fd4b commit c5404a7

File tree

16 files changed

+1449
-168
lines changed

16 files changed

+1449
-168
lines changed

classes/protocol_settings.py

Lines changed: 69 additions & 28 deletions
Original file line number | Diff line number | Diff line change
def process_registery(self, registry : Union[dict[int, int], dict[int, bytes]], map) -> dict:
    """Convert raw register values into named variables using the registry map.

    Two passes over *map*: the first decodes every non-concatenated entry via
    the normal register helpers; the second stitches multi-register
    (concatenated) values together, with a fast path for ASCII data where each
    16-bit register packs two characters.

    Returns a dict keyed by ``entry.variable_name``.
    """
    info = {}

    # First pass: process all non-concatenated entries
    for entry in map:
        if entry.register not in registry:
            continue

        if not entry.concatenate:
            if isinstance(registry[entry.register], bytes):
                value = self.process_register_bytes(registry, entry)
            else:
                value = self.process_register_ushort(registry, entry)
            info[entry.variable_name] = value

    # Second pass: process concatenated entries.
    # An entry is only emitted when every register in its chain is present.
    for entry in map:
        if entry.register not in registry or not entry.concatenate:
            continue

        concatenated_value = ""
        all_registers_exist = True

        if entry.data_type == Data_Type.ASCII:
            # For ASCII concatenated variables, extract two 8-bit characters
            # from each 16-bit register.
            for reg in entry.concatenate_registers:
                if reg not in registry:
                    all_registers_exist = False
                    break

                reg_value = registry[reg]
                # The registry may hold raw bytes (see the signature); the
                # shift arithmetic below only works on ints, so normalise
                # first. NOTE(review): assumes big-endian register bytes --
                # confirm against the transport that fills the registry.
                if isinstance(reg_value, bytes):
                    reg_value = int.from_bytes(reg_value, byteorder="big")

                # Extract high byte (bits 8-15) and low byte (bits 0-7)
                high_byte = (reg_value >> 8) & 0xFF
                low_byte = reg_value & 0xFF

                # Emit low byte first, then high byte (device-specific order)
                concatenated_value += chr(low_byte) + chr(high_byte)
        else:
            for reg in entry.concatenate_registers:
                if reg not in registry:
                    all_registers_exist = False
                    break

                # Build a temporary single-register entry so the normal
                # decoding helpers can be reused for each register in the
                # chain.
                temp_entry = registry_map_entry(
                    registry_type=entry.registry_type,
                    register=reg,
                    register_bit=0,
                    register_byte=0,
                    variable_name=f"temp_{reg}",
                    documented_name=f"temp_{reg}",
                    unit="",
                    unit_mod=1.0,
                    concatenate=False,
                    concatenate_registers=[],
                    values=[],
                    data_type=entry.data_type,
                    data_type_size=entry.data_type_size
                )

                if isinstance(registry[reg], bytes):
                    value = self.process_register_bytes(registry, temp_entry)
                else:
                    value = self.process_register_ushort(registry, temp_entry)

                concatenated_value += str(value)

        if all_registers_exist:
            # Replace null characters with spaces and trim for ASCII
            if entry.data_type == Data_Type.ASCII:
                concatenated_value = concatenated_value.replace("\x00", " ").strip()

            info[entry.variable_name] = concatenated_value

    return info
classes/transports/canbus.py

Lines changed: 12 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -240,6 +240,18 @@ def read_data(self) -> dict[str, str]:
240240

241241
info.update(new_info)
242242

243+
# Check for serial number variables and promote to device_serial_number
244+
if info:
245+
# Look for serial number variable
246+
if "serial_number" in info:
247+
value = info["serial_number"]
248+
if value and value != "None" and str(value).strip():
249+
# Found a valid serial number, promote it
250+
if self.device_serial_number != str(value):
251+
self._log.info(f"Promoting parsed serial number: {value} (from variable: serial_number)")
252+
self.device_serial_number = str(value)
253+
self.update_identifier()
254+
243255
currentTime = time.time()
244256

245257
if not info:

classes/transports/influxdb_out.py

Lines changed: 182 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,182 @@
1+
import sys
2+
from configparser import SectionProxy
3+
from typing import TextIO
4+
import time
5+
6+
from defs.common import strtobool
7+
8+
from ..protocol_settings import Registry_Type, WriteMode, registry_map_entry
9+
from .transport_base import transport_base
10+
11+
12+
class influxdb_out(transport_base):
    ''' InfluxDB v1 output transport that writes data to an InfluxDB server '''

    # Connection / output defaults; overridden per-instance from the config
    # section in __init__.
    host: str = "localhost"
    port: int = 8086
    database: str = "solar"
    username: str = ""
    password: str = ""
    measurement: str = "device_data"
    include_timestamp: bool = True
    include_device_info: bool = True
    batch_size: int = 100
    batch_timeout: float = 10.0

    client = None  # influxdb.InfluxDBClient, created in connect()

    def __init__(self, settings: SectionProxy):
        self.host = settings.get("host", fallback=self.host)
        self.port = settings.getint("port", fallback=self.port)
        self.database = settings.get("database", fallback=self.database)
        self.username = settings.get("username", fallback=self.username)
        self.password = settings.get("password", fallback=self.password)
        self.measurement = settings.get("measurement", fallback=self.measurement)
        # NOTE(review): when the option is absent, fallback returns a bool that
        # is then passed through strtobool -- confirm strtobool accepts bools.
        self.include_timestamp = strtobool(settings.get("include_timestamp", fallback=self.include_timestamp))
        self.include_device_info = strtobool(settings.get("include_device_info", fallback=self.include_device_info))
        self.batch_size = settings.getint("batch_size", fallback=self.batch_size)
        self.batch_timeout = settings.getfloat("batch_timeout", fallback=self.batch_timeout)

        # Per-instance batch state. Previously these were mutable CLASS
        # attributes ([] / 0), so every influxdb_out instance shared one
        # buffer; initialise them per instance instead.
        self.batch_points = []
        self.last_batch_time = 0.0

        self.write_enabled = True  # InfluxDB output is always write-enabled
        super().__init__(settings)

    def connect(self):
        """Initialize the InfluxDB client connection.

        Sets self.connected; creates the target database if missing. The
        influxdb package is imported lazily so the transport can be configured
        without the dependency installed.
        """
        self._log.info("influxdb_out connect")

        try:
            from influxdb import InfluxDBClient

            # Create InfluxDB client
            self.client = InfluxDBClient(
                host=self.host,
                port=self.port,
                username=self.username if self.username else None,
                password=self.password if self.password else None,
                database=self.database
            )

            # Test connection
            self.client.ping()

            # Create database if it doesn't exist
            databases = self.client.get_list_database()
            if not any(db['name'] == self.database for db in databases):
                self._log.info(f"Creating database: {self.database}")
                self.client.create_database(self.database)

            self.connected = True
            self._log.info(f"Connected to InfluxDB at {self.host}:{self.port}")

        except ImportError:
            self._log.error("InfluxDB client not installed. Please install with: pip install influxdb")
            self.connected = False
        except Exception as e:
            self._log.error(f"Failed to connect to InfluxDB: {e}")
            self.connected = False

    def _should_force_float(self, key: str, from_transport: transport_base) -> bool:
        """Return True when the registry map entry for *key* has a unit_mod
        other than 1.0, meaning its scaled value must be written as a float."""
        if not (hasattr(from_transport, 'protocolSettings') and from_transport.protocolSettings):
            return False

        # Check both input and holding registries for the variable.
        for registry_type in [Registry_Type.INPUT, Registry_Type.HOLDING]:
            for entry in from_transport.protocolSettings.get_registry_map(registry_type):
                if entry.variable_name == key:
                    if entry.unit_mod != 1.0:
                        self._log.debug(f"Variable {key} has unit_mod {entry.unit_mod}, forcing float format")
                        return True
                    break  # found in this registry; try the next registry type
        return False

    def write_data(self, data: dict[str, str], from_transport: transport_base):
        """Write one reading to InfluxDB, batching points until batch_size or
        batch_timeout is reached."""
        if not self.write_enabled or not self.connected:
            return

        self._log.info(f"write data from [{from_transport.transport_name}] to influxdb_out transport")
        self._log.info(data)

        # Device information becomes InfluxDB tags so series are queryable
        # per device.
        tags = {}
        if self.include_device_info:
            tags.update({
                "device_identifier": from_transport.device_identifier,
                "device_name": from_transport.device_name,
                "device_manufacturer": from_transport.device_manufacturer,
                "device_model": from_transport.device_model,
                "device_serial_number": from_transport.device_serial_number,
                "transport": from_transport.transport_name
            })

        # Fields hold the actual values; convert to numeric types where
        # possible so InfluxDB stores them as numbers, not strings.
        fields = {}
        for key, value in data.items():
            should_force_float = self._should_force_float(key, from_transport)

            try:
                float_val = float(value)

                # Keep integral values as ints unless the registry map says
                # the field is scaled (mixing int/float in one field would
                # cause InfluxDB type conflicts).
                if should_force_float or not float_val.is_integer():
                    fields[key] = float_val
                else:
                    fields[key] = int(float_val)
            except (ValueError, TypeError):
                # Non-numeric values are stored as strings
                fields[key] = str(value)

        point = {
            "measurement": self.measurement,
            "tags": tags,
            "fields": fields
        }

        # Add timestamp if enabled
        if self.include_timestamp:
            point["time"] = int(time.time() * 1e9)  # Convert to nanoseconds

        self.batch_points.append(point)

        # Flush when the batch is full or stale
        current_time = time.time()
        if (len(self.batch_points) >= self.batch_size or
            (current_time - self.last_batch_time) >= self.batch_timeout):
            self._flush_batch()

    def _flush_batch(self):
        """Flush the batch of points to InfluxDB.

        On failure the batch is retained (so points are retried on the next
        flush) and the transport is marked disconnected.
        """
        if not self.batch_points:
            return

        try:
            self.client.write_points(self.batch_points)
            self._log.info(f"Wrote {len(self.batch_points)} points to InfluxDB")
            self.batch_points = []
            self.last_batch_time = time.time()
        except Exception as e:
            self._log.error(f"Failed to write batch to InfluxDB: {e}")
            self.connected = False

    def init_bridge(self, from_transport: transport_base):
        """Initialize bridge - not needed for InfluxDB output"""
        pass

    def __del__(self):
        """Cleanup on destruction - flush any remaining points.

        __del__ can run during interpreter shutdown when modules are partially
        torn down, so it must never let an exception propagate.
        """
        try:
            if self.batch_points:
                self._flush_batch()
        except Exception:
            pass
        if self.client:
            try:
                self.client.close()
            except Exception:
                pass

0 commit comments

Comments
 (0)