Python Integration
This page provides practical Python examples for working with the FGA Logger — reading live serial data, saving to CSV, and loading data for analysis.
Dependencies:
Install with:
pip install pyserial pandas matplotlib
Reading Live Serial Data
This example connects to the FGA Logger over USB and prints each incoming CSV row:
import serial

# Change to your port:
#   Windows: 'COM3', 'COM4', etc.
#   Linux:   '/dev/ttyUSB0'
#   macOS:   '/dev/tty.usbserial-XXXX'
PORT = 'COM3'
BAUD = 115200  # Adjust to match your FGA Logger serial settings


def read_logger(port: str, baud: int) -> None:
    """Print each CSV row arriving from the FGA Logger on *port*.

    Runs until interrupted with Ctrl+C; the serial port is closed
    automatically by the context manager on exit.
    """
    with serial.Serial(port, baud, timeout=2) as ser:
        print(f"Connected to {port} at {baud} baud")
        print("Waiting for data...\n")
        try:
            while True:
                # errors='replace' keeps the loop alive on garbled bytes
                line = ser.readline().decode('utf-8', errors='replace').strip()
                if line:
                    print(line)
        except KeyboardInterrupt:
            # Exit cleanly on Ctrl+C instead of dumping a traceback
            print("\nStopped.")


if __name__ == '__main__':
    read_logger(PORT, BAUD)
Saving Serial Data to CSV
This example reads from the FGA Logger and saves all incoming rows to a local CSV file. The first received header line is written once; all subsequent data rows follow.
import serial
import csv
from datetime import datetime

PORT = 'COM3'
BAUD = 115200
OUTFILE = f"fga_log_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv"

# Expected CSV header from FGA Logger
EXPECTED_HEADER = [
    'Timestamp_ms', 'B1x_nT', 'B1y_nT', 'B1z_nT', 'B1v_nT',
    'B2x_nT', 'B2y_nT', 'B2z_nT', 'B2v_nT',
    'Lat_deg', 'Lon_deg', 'Alt_m', 'SIV', 'Fix', 'HDOP_m'
]


def save_to_csv(port: str, baud: int, outfile: str) -> None:
    """Stream rows from the FGA Logger on *port* into *outfile*.

    The header is written exactly once: the device's own header if it is
    the first line received, otherwise EXPECTED_HEADER. Any header line
    the device re-sends later (e.g. after a reset) is skipped so it never
    appears as a data row. Stop logging with Ctrl+C.
    """
    header_written = False
    with serial.Serial(port, baud, timeout=5) as ser, \
            open(outfile, 'w', newline='') as f:
        writer = csv.writer(f)
        print(f"Logging to {outfile}")
        try:
            while True:
                line = ser.readline().decode('utf-8', errors='replace').strip()
                if not line:
                    continue  # read timed out with no data
                fields = line.split(',')
                is_header = fields[0] == 'Timestamp_ms'

                # Write header once (either from device or our expected header)
                if not header_written:
                    if is_header:
                        writer.writerow(fields)  # Use device header
                    else:
                        writer.writerow(EXPECTED_HEADER)  # Use expected header
                        writer.writerow(fields)  # Write this row as data
                    header_written = True
                    f.flush()
                    continue

                # Skip any header the device re-sends (e.g. after a reset)
                # so it does not end up in the file as a data row.
                if is_header:
                    continue

                # Write data rows; flush so the file is usable even if we
                # lose power or the process dies mid-session.
                writer.writerow(fields)
                f.flush()
                print(f" {line[:80]}")  # Print first 80 chars for monitoring
        except KeyboardInterrupt:
            print(f"\nLogging stopped. File saved: {outfile}")


if __name__ == '__main__':
    save_to_csv(PORT, BAUD, OUTFILE)
Loading a CSV File with pandas
Once you have a CSV file — either from the SD card or saved via serial — load it with pandas for analysis:
import pandas as pd

CSV_FILE = 'fga_log_20240315_143022.csv'


def load_log(csv_file=CSV_FILE):
    """Load an FGA Logger CSV, print a quick summary, and return the DataFrame.

    Wrapping the example in a function (with a __main__ guard below) keeps
    the file I/O from running at import time and lets you point it at any
    log file.
    """
    df = pd.read_csv(csv_file)
    print("Shape:", df.shape)
    print("\nFirst rows:")
    print(df.head())
    print("\nColumn types:")
    print(df.dtypes)
    print("\nBasic statistics:")
    # Summary stats for sensor 1's field components and total field
    print(df[['B1x_nT', 'B1y_nT', 'B1z_nT', 'B1v_nT']].describe())
    return df


if __name__ == '__main__':
    load_log()
Computing the Gradient (Gradiometer Mode)
In gradiometer configurations with two sensor assemblies, compute the gradient per axis:
import pandas as pd


def add_gradients(df):
    """Add gradiometer columns to *df* in place and return it.

    For each axis, Grad_<axis>_nT = B1<axis>_nT - B2<axis>_nT (sensor 1
    minus sensor 2), plus Grad_v_nT, the Euclidean magnitude of the three
    gradient components.
    """
    # Compute gradient (sensor 1 minus sensor 2) per axis
    for axis in ('x', 'y', 'z'):
        df[f'Grad_{axis}_nT'] = df[f'B1{axis}_nT'] - df[f'B2{axis}_nT']
    # Total gradient magnitude
    df['Grad_v_nT'] = (
        df['Grad_x_nT'] ** 2 +
        df['Grad_y_nT'] ** 2 +
        df['Grad_z_nT'] ** 2
    ) ** 0.5
    return df


if __name__ == '__main__':
    df = add_gradients(pd.read_csv('fga_log.csv'))
    print(df[['Timestamp_ms', 'Grad_x_nT', 'Grad_y_nT', 'Grad_z_nT', 'Grad_v_nT']].head(10))
Filtering by GPS Quality
Filter out rows with no GPS fix or poor accuracy before analysis:
import pandas as pd


def filter_gps(df, fix=3, max_hdop=2.0, min_siv=4):
    """Return a copy of *df* with only high-quality GPS rows.

    Keeps rows where Fix equals *fix* (3 = 3D fix), HDOP_m is below
    *max_hdop*, and SIV (satellites in view) is at least *min_siv*.
    Defaults match the original example's thresholds.
    """
    return df[
        (df['Fix'] == fix) &
        (df['HDOP_m'] < max_hdop) &
        (df['SIV'] >= min_siv)
    ].copy()


if __name__ == '__main__':
    df = pd.read_csv('fga_log.csv')
    df_clean = filter_gps(df)
    print(f"Total rows: {len(df)}")
    print(f"Clean rows: {len(df_clean)}")
    print(f"Removed: {len(df) - len(df_clean)}")
Plotting the Total Field
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv('fga_log.csv')

# Plot sensor 1's total field against time (milliseconds -> seconds),
# using matplotlib's object-oriented API.
fig, ax = plt.subplots(figsize=(12, 4))
ax.plot(df['Timestamp_ms'] / 1000, df['B1v_nT'], linewidth=0.8)
ax.set_xlabel('Time (s)')
ax.set_ylabel('Total Field B1 (nT)')
ax.set_title('FGA Logger — Total Magnetic Field')
fig.tight_layout()
fig.savefig('field_plot.png', dpi=150)
plt.show()
Exporting GPS Track to GeoJSON
Export the GPS track for use in QGIS or other GIS tools:
import pandas as pd
import json


def export_geojson(df, outfile='track.geojson'):
    """Write the GPS track in *df* to *outfile* as a GeoJSON FeatureCollection.

    Keeps rows with Fix >= 2 and non-null Lat/Lon. Values are cast to
    plain Python int/float because json.dump cannot serialize numpy
    integer types, and bare NaN in the output would be invalid JSON:
    a NaN altitude is omitted from the coordinates, and NaN field values
    become null. Returns the GeoJSON dict.
    """
    # Keep only rows with valid GPS fix
    df_gps = df[df['Fix'] >= 2].dropna(subset=['Lat_deg', 'Lon_deg'])

    def _num(value):
        # NaN -> None so json.dump emits valid 'null', not bare NaN
        return None if pd.isna(value) else float(value)

    # Build GeoJSON FeatureCollection
    features = []
    for _, row in df_gps.iterrows():
        coordinates = [float(row['Lon_deg']), float(row['Lat_deg'])]
        if pd.notna(row['Alt_m']):
            coordinates.append(float(row['Alt_m']))
        features.append({
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": coordinates,
            },
            "properties": {
                "timestamp_ms": int(row['Timestamp_ms']),
                "B1v_nT": _num(row['B1v_nT']),
                "B2v_nT": _num(row['B2v_nT']),
            },
        })

    geojson = {"type": "FeatureCollection", "features": features}
    with open(outfile, 'w') as f:
        json.dump(geojson, f, indent=2)
    print(f"Exported {len(features)} points to {outfile}")
    return geojson


if __name__ == '__main__':
    export_geojson(pd.read_csv('fga_log.csv'))