The following Python program was generated by ChatGPT in response to my question requesting a resource monitor to log resource usage on a Raspberry Pi 5. I initially documented this application in my Data Modes column in the February 2026 issue of Practical Wireless.
Before running the software, you need to install the prerequisites with the following lines:
sudo apt update
sudo apt install python3-pip python3-psutil -y
Here is the full Python program:
#!/usr/bin/env python3
import csv
import os
import time
import subprocess
from datetime import datetime
import psutil
# ====== CONFIG ======
LOG_FILE = "rpi5_resource_log.csv"  # output CSV, created next to the script
INTERVAL_SECONDS = 60  # sampling period in seconds (60 s = 1 minute)
DISK_PATH = "/"  # filesystem to monitor (root FS, i.e. the SD card)
# =====================
def get_cpu_temp():
    """Read the SoC temperature via ``vcgencmd measure_temp``.

    On a Raspberry Pi this single sensor covers CPU and GPU.

    Returns:
        float: temperature in degrees Celsius, or ``None`` if vcgencmd is
        missing, exits non-zero, or produces unparseable output.
    """
    try:
        out = subprocess.check_output(
            ["vcgencmd", "measure_temp"],
            encoding="utf-8",
            errors="ignore",
        )
        # Example output: "temp=52.3'C\n"
        parts = out.strip().split("=")
        if len(parts) < 2:
            return None
        value = parts[1].split("'")[0]
        return float(value)
    except (OSError, subprocess.CalledProcessError, ValueError):
        # OSError: vcgencmd not installed; CalledProcessError: non-zero
        # exit status; ValueError: output was not a parseable number.
        # Best-effort by design -- the logger writes a blank cell instead.
        return None
def get_gpu_core_freq_mhz():
    """Read the GPU/core clock via ``vcgencmd measure_clock core``.

    Returns:
        float: core clock frequency in MHz, or ``None`` if vcgencmd is
        missing, exits non-zero, or produces unparseable output.
    """
    try:
        out = subprocess.check_output(
            ["vcgencmd", "measure_clock", "core"],
            encoding="utf-8",
            errors="ignore",
        )
        # Example output: "frequency(1)=600000000\n"
        parts = out.strip().split("=")
        if len(parts) < 2:
            return None
        hz = float(parts[1])
        return hz / 1_000_000.0
    except (OSError, subprocess.CalledProcessError, ValueError):
        # OSError: vcgencmd not installed; CalledProcessError: non-zero
        # exit status; ValueError: output was not a parseable number.
        # Best-effort by design -- the logger writes a blank cell instead.
        return None
def build_fieldnames(num_cores: int):
    """Return the ordered CSV column names for a machine with *num_cores* CPUs."""
    per_core_columns = [f"cpu_core_{core}_percent" for core in range(num_cores)]
    return (
        ["timestamp", "cpu_temp_c"]
        + per_core_columns
        + [
            "gpu_core_freq_mhz",
            "ram_used_mb",
            "ram_percent",
            "disk_used_gb",
            "disk_percent",
        ]
    )
def ensure_header(path: str, fieldnames):
    """Write the CSV header row to *path* unless the file already has content.

    A missing, empty, or unreadable file gets a fresh header; an existing
    non-empty file is left untouched so repeated runs keep appending.
    """
    try:
        has_content = os.path.getsize(path) > 0
    except OSError:
        # File missing or not statable -- treat as needing a header.
        has_content = False
    if not has_content:
        with open(path, "w", newline="") as f:
            csv.DictWriter(f, fieldnames=fieldnames).writeheader()
def main():
    """Sample system metrics every INTERVAL_SECONDS and append them to LOG_FILE.

    Runs until interrupted with Ctrl+C. Each row records timestamp, SoC
    temperature, per-core CPU usage, GPU core clock, RAM, and disk usage.
    """
    # psutil.cpu_count() can return None on some platforms; fall back to 1
    # so build_fieldnames() and the per-core loop below stay valid.
    num_cores = psutil.cpu_count(logical=True) or 1
    fieldnames = build_fieldnames(num_cores)
    ensure_header(LOG_FILE, fieldnames)
    print(f"Logging to {LOG_FILE} every {INTERVAL_SECONDS} seconds. Press Ctrl+C to stop.")
    try:
        while True:
            loop_start = time.time()
            timestamp = datetime.now().isoformat(timespec="seconds")
            # CPU temp and GPU clock (either may be None if vcgencmd is absent)
            cpu_temp = get_cpu_temp()
            gpu_freq = get_gpu_core_freq_mhz()
            # CPU usage per core (1-second averaging window; blocks ~1 s)
            cpu_percents = psutil.cpu_percent(interval=1, percpu=True)
            # RAM
            vm = psutil.virtual_memory()
            ram_used_mb = vm.used / (1024 * 1024)
            ram_percent = vm.percent
            # Disk (SD card / root FS)
            du = psutil.disk_usage(DISK_PATH)
            disk_used_gb = du.used / (1024 * 1024 * 1024)
            disk_percent = du.percent
            # Build row; unavailable readings become blank cells in the CSV.
            row = {
                "timestamp": timestamp,
                "cpu_temp_c": cpu_temp if cpu_temp is not None else "",
                "gpu_core_freq_mhz": round(gpu_freq, 2) if gpu_freq is not None else "",
                "ram_used_mb": round(ram_used_mb, 1),
                "ram_percent": ram_percent,
                "disk_used_gb": round(disk_used_gb, 2),
                "disk_percent": disk_percent,
            }
            # Per-core CPU usage; pad with blanks if psutil returned fewer
            # entries than the header expects.
            for i in range(num_cores):
                key = f"cpu_core_{i}_percent"
                row[key] = cpu_percents[i] if i < len(cpu_percents) else ""
            # Append to CSV, re-opening each cycle so every row is flushed
            # to disk and survives a crash or power cut.
            with open(LOG_FILE, "a", newline="") as f:
                writer = csv.DictWriter(f, fieldnames=fieldnames)
                writer.writerow(row)
            # Sleep for the remainder of the interval so the total loop
            # time is ~INTERVAL_SECONDS (sampling itself takes ~1 s).
            elapsed = time.time() - loop_start
            remaining = INTERVAL_SECONDS - elapsed
            if remaining > 0:
                time.sleep(remaining)
    except KeyboardInterrupt:
        print("\nStopping logging.")


if __name__ == "__main__":
    main()
Save the program as log_resources.py in a directory of your choice. You could use /home/pi or /usr/share; either will be fine. Once saved, open the installation directory in a terminal session and make the file executable with the following command:
sudo chmod +x log_resources.py
You can then run the program by entering the following:
./log_resources.py
This will log the data to a CSV file called rpi5_resource_log.csv in the same directory as your Python script. This can be opened for analysis and display using Excel or most other spreadsheet applications. By default, the monitoring interval is set to 60 seconds, but it can be changed by altering the INTERVAL_SECONDS variable near the top of the Python script. If you want to make more significant changes to the script, I suggest you copy it into ChatGPT and ask for the additional features you need, or pose an entirely new question.
Happy Monitoring, Mike – G4WNC