--- error_log_2_csv.py
+++ error_log_2_csv.py
@@ -58,23 +58,39 @@
         # Remove the processed Callbacks string to avoid it being reparsed.
         full_text = full_text[:callbacks_match.start()]
 
-    # 4. 'PC' (e.g., PC:006477D8)
+    # Function to format PC, DA, SP values
+    def format_address_field(value):
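+        """Normalize a hex address (e.g. '6477D8' or '0x6477d8') to '0x006477D8' form."""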
+        if value:
+            # Remove potential '0x' prefix for consistent processing
+            if value.lower().startswith('0x'):
+                value = value[2:]
+            # Pad to 8 digits with leading zeros, uppercase, and add the '0x' prefix
+            return f'0x{value.zfill(8).upper()}'
+        return value
+
+    # 4. 'PC' (e.g., PC:006477D8)
     pc_match = re.search(r'PC:(\S+)', full_text)
     if pc_match:
-        pc_value = pc_match.group(1)
-        # Remove potential '0x' prefix for consistent processing
-        if pc_value.lower().startswith('0x'):
-            pc_value = pc_value[2:]
-        # Pad with leading zeros to 8 digits and add '0x' prefix
-        data['PC'] = f'0x{pc_value.zfill(8).upper()}'
+        data['PC'] = format_address_field(pc_match.group(1))
+
+    # 5. 'DA' (e.g., DA:006477D8)
+    da_match = re.search(r'DA:(\S+)', full_text)
+    if da_match:
+        data['DA'] = format_address_field(da_match.group(1))
+
+    # 6. 'SP' (e.g., SP:006477D8)
+    sp_match = re.search(r'SP:(\S+)', full_text)
+    if sp_match:
+        data['SP'] = format_address_field(sp_match.group(1))
 
     # --- Generic Handling for simple "key:value" fields ---
     # This regex finds all words followed by a colon, then captures the
     # non-space characters that follow as the value.
     simple_pairs = re.findall(r'(\w+):(\S+)', full_text)
     for key, value in simple_pairs:
-        # Add to dict only if not one of the specially handled keys.
-        if key not in ['tsk', 't']:
+        # Add to dict only if not one of the specially handled keys.
+        if key not in ['tsk', 't', 'PC', 'DA', 'SP']: # Exclude PC, DA, SP from generic parsing
             data[key] = value
 
     return data
@@ -107,7 +123,14 @@
 
     # --- Read Input and Parse ---
     print(f"Reading from '{input_file}'...")
-    tree = ET.parse(input_file)
+    try:
+        tree = ET.parse(input_file)
+    except FileNotFoundError:
+        print(f"Error: Input file '{input_file}' not found.")
+        return
+    except ET.ParseError as e:
+        print(f"Error: Could not parse XML from '{input_file}'. {e}")
+        return
     root = tree.getroot()
 
     log_elements = root.findall('log')
@@ -118,9 +141,12 @@
     all_logs_data = [parse_log_entry(log) for log in log_elements]
 
     # --- Write Output CSV ---
-    with open(output_file, 'w', newline='', encoding='utf-8') as f:
-        writer = csv.DictWriter(f, fieldnames=CSV_FIELDNAMES)
-        writer.writeheader()
-        writer.writerows(all_logs_data)
-    print(f"Successfully converted {len(all_logs_data)} log entries.")
-    print(f"Output saved to '{output_file}'")
+    try:
+        with open(output_file, 'w', newline='', encoding='utf-8') as f:
+            writer = csv.DictWriter(f, fieldnames=CSV_FIELDNAMES)
+            writer.writeheader()
+            writer.writerows(all_logs_data)
+        print(f"Successfully converted {len(all_logs_data)} log entries.")
+        print(f"Output saved to '{output_file}'")
+    except IOError as e:
+        print(f"Error: Could not write to output file '{output_file}'. {e}")
