Description: modernize deprecated Python APIs and rework ZFS ARC accounting
 Replace the import of 'error' from the deprecated 'sre_constants' module
 with the equivalent 're.error', use the Thread.name attribute instead of
 the deprecated Thread.getName() method, and fix a typo in an error
 message ("Invalud" -> "Invalid"). Rework the ZFS arcstats handling:
 parse /proc/spl/kstat/zfs/arcstats by field name instead of
 pre-computed line indexes, and support newer ZFS releases that no
 longer report arc_meta_used/arc_meta_min separately.
 .
 nohang (0.2.0-2) unstable; urgency=medium
 .
   * Ack NMU.
   * Bump standards version to 4.7.0.
Author: Alex Myczko <tar@debian.org>

---
The information above should follow the Patch Tagging Guidelines, please
check out https://dep.debian.net/deps/dep3/ to learn about the format. Here
are templates for supplementary fields that you might want to add:

Origin: (upstream|backport|vendor|other), (<patch-url>|commit:<commit-id>)
Bug: <upstream-bugtracker-url>
Bug-Debian: https://bugs.debian.org/<bugnumber>
Bug-Ubuntu: https://launchpad.net/bugs/<bugnumber>
Forwarded: (no|not-needed|<patch-forwarded-url>)
Applied-Upstream: <version>, (<commit-url>|commit:<commit-id>)
Reviewed-By: <name and email of someone who approved/reviewed the patch>
Last-Update: 2024-07-01

--- nohang-0.2.0.orig/src/nohang
+++ nohang-0.2.0/src/nohang
@@ -7,7 +7,7 @@ from time import sleep, monotonic
 from operator import itemgetter
 from sys import stdout, stderr, argv, exit
 from re import search
-from sre_constants import error as invalid_re
+from re import error as invalid_re
 from signal import signal, SIGKILL, SIGTERM, SIGINT, SIGQUIT, SIGHUP, SIGUSR1
 
 
@@ -165,41 +165,49 @@ def memload():
             os.kill(self_pid, SIGUSR1)
 
 
-def arcstats():
+def parse_zfs_arcstats():
     """
+    Parses '/proc/spl/kstat/zfs/arcstats'.
+    Returns a dictionary with 'name' as keys and 'data' as values.
     """
-    with open(arcstats_path, 'rb') as f:
-        a_list = f.read().decode().split('\n')
+    parsed_data = {}
 
-    for n, line in enumerate(a_list):
-        if n == c_min_index:
-            c_min = int(line.rpartition(' ')[2]) / 1024
-        elif n == size_index:
-            size = int(line.rpartition(' ')[2]) / 1024
+    with open(arcstats_path, 'r') as as_file:
+        lines = iter(as_file.readlines())
 
-        elif n == arc_meta_used_index:
-            arc_meta_used = int(line.rpartition(' ')[2]) / 1024
-
-        elif n == arc_meta_min_index:
-            arc_meta_min = int(line.rpartition(' ')[2]) / 1024
-
-        else:
-            continue
-
-    c_rec = size - c_min
-
-    if c_rec < 0:
-        c_rec = 0
-
-    meta_rec = arc_meta_used - arc_meta_min
-
-    if meta_rec < 0:
-        meta_rec = 0
-    zfs_available = c_rec + meta_rec
-
-    # return c_min, size, arc_meta_used, arc_meta_min, zfs_available
+        # consume lines until the header row:
+        for line in lines:
+            if 'name' in line and 'data' in line:
+                break
 
-    return zfs_available
+        # Continue iterating over the remaining lines
+        for line in lines:
+            if line.strip():
+                parts = line.split()
+                name = parts[0]
+                data_type = parts[1]
+                data = parts[2]
+                if data_type == '4':
+                    data = int(data)
+                parsed_data[name] = data
+
+    return parsed_data
+
+
+def zfs_arc_available():
+    """returns how many KiB of the zfs ARC are reclaimable"""
+    stats = parse_zfs_arcstats()
+
+    c_rec = max(stats['size'] - stats['c_min'], 0)
+
+    # old zfs: consider arc_meta_used, arc_meta_min
+    if 'arc_meta_used' in stats and 'arc_meta_min' in stats:
+        meta_rec = max(stats['arc_meta_used'] - stats['arc_meta_min'], 0)
+        return (c_rec + meta_rec) / 1024
+
+    # new zfs: metadata is no longer accounted for separately,
+    # https://github.com/openzfs/zfs/commit/a8d83e2a24de6419dc58d2a7b8f38904985726cb
+    return c_rec / 1024
 
 
 def exe(cmd):
@@ -209,7 +217,7 @@ def exe(cmd):
 
     cmd_num_dict['cmd_num'] += 1
     cmd_num = cmd_num_dict['cmd_num']
-    th_name = threading.current_thread().getName()
+    th_name = threading.current_thread().name
 
     log('Executing Command-{} {} with timeout {}s in {}'.format(
         cmd_num,
@@ -237,11 +245,11 @@ def start_thread(func, *a, **k):
     """ run function in a new thread
     """
     th = threading.Thread(target=func, args=a, kwargs=k, daemon=True)
-    th_name = th.getName()
+    th_name = th.name
 
     if debug_threading:
         log('Starting {} from {}'.format(
-            th_name, threading.current_thread().getName()
+            th_name, threading.current_thread().name
         ))
 
     try:
@@ -350,7 +358,7 @@ def pop(cmd):
     else:
         wait_time = 30
 
-    th_name = threading.current_thread().getName()
+    th_name = threading.current_thread().name
 
     log('Executing Command-{} {} with timeout {}s in {}'.format(
         cmd_num,
@@ -1341,7 +1349,7 @@ def check_mem_and_swap():
     sf = int(m_list[swap_free_index].split(':')[1])
 
     if ZFS:
-        ma += arcstats()
+        ma += zfs_arc_available()
 
     return ma, st, sf
 
@@ -1369,7 +1377,7 @@ def meminfo():
     md['available'] = mem_available
 
     if ZFS:
-        z = arcstats()
+        z = zfs_arc_available()
         mem_available += z
 
     md['shared'] = shmem
@@ -3695,7 +3703,7 @@ if 'max_victim_ancestry_depth' in config
         errprint('Invalid max_victim_ancestry_depth value, not integer\nExit')
         exit(1)
     if max_victim_ancestry_depth < 1:
-        errprint('Invalud max_victim_ancestry_depth value\nExit')
+        errprint('Invalid max_victim_ancestry_depth value\nExit')
         exit(1)
 else:
     missing_config_key('max_victim_ancestry_depth')
@@ -3958,29 +3966,6 @@ if check_kmsg:
 if ZFS:
     log('WARNING: ZFS found. Available memory will not be calculated '
         'correctly (issue#89)')
-    try:
-        # find indexes
-        with open(arcstats_path, 'rb') as f:
-            a_list = f.read().decode().split('\n')
-        for n, line in enumerate(a_list):
-            if line.startswith('c_min '):
-                c_min_index = n
-
-            elif line.startswith('size '):
-                size_index = n
-
-            elif line.startswith('arc_meta_used '):
-                arc_meta_used_index = n
-
-            elif line.startswith('arc_meta_min '):
-                arc_meta_min_index = n
-
-            else:
-                continue
-    except Exception as e:
-        log(e)
-        ZFS = False
-
 
 while True:
 
