Compare commits
5 Commits
5ba0fafbc1
...
new-archit
| Author | SHA1 | Date | |
|---|---|---|---|
| 26cba97cbd | |||
| 175bb7cbdc | |||
| bf0e70e9d9 | |||
| cb4104e59a | |||
| 570639e988 |
@@ -13,12 +13,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||||||
- complete test suite revamp
|
- complete test suite revamp
|
||||||
- updated download cache naming to use `Author_Title` format with normalized separators
|
- updated download cache naming to use `Author_Title` format with normalized separators
|
||||||
- optimized library pagination fetch with bounded concurrent scheduling
|
- optimized library pagination fetch with bounded concurrent scheduling
|
||||||
|
- adjusted library first-page probe order to prefer larger page sizes for medium libraries
|
||||||
|
- removed eager search cache priming during library load to reduce startup work
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
|
|
||||||
- reused library metadata for download filename generation to avoid `Unknown-Author_Unknown-Title` when title/author are already known in the UI
|
- reused library metadata for download filename generation to avoid `Unknown-Author_Unknown-Title` when title/author are already known in the UI
|
||||||
- fixed Audible last-position request parameter handling after library client refactor
|
- fixed Audible last-position request parameter handling after library client refactor
|
||||||
- added retry behavior and explicit size diagnostics when downloaded files are too small
|
- added retry behavior and explicit size diagnostics when downloaded files are too small
|
||||||
|
- prevented table rendering crashes by generating unique row keys instead of using title-only keys
|
||||||
|
|
||||||
## [0.1.6] - 2026-02-16
|
## [0.1.6] - 2026-02-16
|
||||||
|
|
||||||
|
|||||||
@@ -16,16 +16,15 @@ class AppLibraryMixin:
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
all_items = self.library_client.fetch_all_items(
|
all_items = self.library_client.fetch_all_items(self._thread_status_update)
|
||||||
self._thread_status_update)
|
|
||||||
self.call_from_thread(self.on_library_loaded, all_items)
|
self.call_from_thread(self.on_library_loaded, all_items)
|
||||||
except (OSError, ValueError, KeyError) as exc:
|
except (OSError, ValueError, KeyError) as exc:
|
||||||
self.call_from_thread(self.on_library_error, str(exc))
|
self.call_from_thread(self.on_library_error, str(exc))
|
||||||
|
|
||||||
def on_library_loaded(self, items: list[LibraryItem]) -> None:
|
def on_library_loaded(self, items: list[LibraryItem]) -> None:
|
||||||
|
"""Store fetched items and refresh the active library view."""
|
||||||
self.all_items = items
|
self.all_items = items
|
||||||
self._search_text_cache.clear()
|
self._search_text_cache.clear()
|
||||||
self._prime_search_cache(items)
|
|
||||||
self.update_status(f"Loaded {len(items)} books")
|
self.update_status(f"Loaded {len(items)} books")
|
||||||
if self.show_all_mode:
|
if self.show_all_mode:
|
||||||
self.show_all()
|
self.show_all()
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ from textual.widgets import DataTable, Static
|
|||||||
|
|
||||||
class AppTableMixin:
|
class AppTableMixin:
|
||||||
def _populate_table(self, items: list[LibraryItem]) -> None:
|
def _populate_table(self, items: list[LibraryItem]) -> None:
|
||||||
|
"""Render library items into the table with stable unique row keys."""
|
||||||
table = self.query_one("#library_table", DataTable)
|
table = self.query_one("#library_table", DataTable)
|
||||||
table.clear()
|
table.clear()
|
||||||
|
|
||||||
@@ -22,18 +23,41 @@ class AppTableMixin:
|
|||||||
self.update_status("No books found.")
|
self.update_status("No books found.")
|
||||||
return
|
return
|
||||||
|
|
||||||
for item in items:
|
used_keys: set[str] = set()
|
||||||
|
for index, item in enumerate(items):
|
||||||
title, author, runtime, progress, downloaded = format_item_as_row(
|
title, author, runtime, progress, downloaded = format_item_as_row(
|
||||||
item, self.library_client, self.download_manager
|
item, self.library_client, self.download_manager
|
||||||
)
|
)
|
||||||
table.add_row(title, author, runtime,
|
row_key = self._build_row_key(item, title, index, used_keys)
|
||||||
progress, downloaded, key=title)
|
table.add_row(title, author, runtime, progress, downloaded, key=row_key)
|
||||||
|
|
||||||
self.current_items = items
|
self.current_items = items
|
||||||
status = self.query_one("#status", Static)
|
status = self.query_one("#status", Static)
|
||||||
status.display = False
|
status.display = False
|
||||||
self._apply_column_widths(table)
|
self._apply_column_widths(table)
|
||||||
|
|
||||||
|
def _build_row_key(
    self,
    item: LibraryItem,
    title: str,
    index: int,
    used_keys: set[str],
) -> str:
    """Build a table row key that is unique within *used_keys*.

    Prefers the item's ASIN when a library client can supply one;
    otherwise falls back to the title combined with the row index.
    Collisions receive an increasing ``#<n>`` suffix starting at 2.
    The chosen key is recorded in *used_keys* before returning.
    """
    asin = None
    if self.library_client:
        asin = self.library_client.extract_asin(item)
    # Fall back to title+index when no ASIN is available.
    base_key = asin if asin else f"{title}#{index}"

    # Probe base_key first, then base_key#2, base_key#3, ... until free.
    candidate = base_key
    suffix = 1
    while candidate in used_keys:
        suffix += 1
        candidate = f"{base_key}#{suffix}"
    used_keys.add(candidate)
    return candidate
|
||||||
|
|
||||||
def _refresh_table(self) -> None:
|
def _refresh_table(self) -> None:
|
||||||
if self.current_items:
|
if self.current_items:
|
||||||
self._populate_table(self.current_items)
|
self._populate_table(self.current_items)
|
||||||
@@ -79,11 +103,9 @@ class AppTableMixin:
|
|||||||
items = self.all_items
|
items = self.all_items
|
||||||
|
|
||||||
if self.filter_text:
|
if self.filter_text:
|
||||||
items = filter_items(items, self.filter_text,
|
items = filter_items(items, self.filter_text, self._get_search_text)
|
||||||
self._get_search_text)
|
|
||||||
self._populate_table(items)
|
self._populate_table(items)
|
||||||
self.update_status(
|
self.update_status(f"Filter: '{self.filter_text}' ({len(items)} books)")
|
||||||
f"Filter: '{self.filter_text}' ({len(items)} books)")
|
|
||||||
return
|
return
|
||||||
|
|
||||||
if not self.show_all_mode and self.library_client:
|
if not self.show_all_mode and self.library_client:
|
||||||
@@ -97,6 +119,7 @@ class AppTableMixin:
|
|||||||
if cached is not None:
|
if cached is not None:
|
||||||
return cached
|
return cached
|
||||||
from ..library import build_search_text
|
from ..library import build_search_text
|
||||||
|
|
||||||
search_text = build_search_text(item, self.library_client)
|
search_text = build_search_text(item, self.library_client)
|
||||||
self._search_text_cache[cache_key] = search_text
|
self._search_text_cache[cache_key] = search_text
|
||||||
return search_text
|
return search_text
|
||||||
|
|||||||
@@ -17,10 +17,7 @@ class LibraryClientFetchMixin:
|
|||||||
self, on_progress: StatusCallback | None = None
|
self, on_progress: StatusCallback | None = None
|
||||||
) -> list[LibraryItem]:
|
) -> list[LibraryItem]:
|
||||||
"""Fetch all library items from the API."""
|
"""Fetch all library items from the API."""
|
||||||
response_groups = (
|
response_groups = "contributors,product_attrs,product_desc,is_finished,listening_status,percent_complete"
|
||||||
"contributors,media,product_attrs,product_desc,product_details,"
|
|
||||||
"is_finished,listening_status,percent_complete"
|
|
||||||
)
|
|
||||||
return self._fetch_all_pages(response_groups, on_progress)
|
return self._fetch_all_pages(response_groups, on_progress)
|
||||||
|
|
||||||
def _fetch_page(
|
def _fetch_page(
|
||||||
@@ -75,8 +72,7 @@ class LibraryClientFetchMixin:
|
|||||||
if len(first_page_items) < page_size:
|
if len(first_page_items) < page_size:
|
||||||
return all_items
|
return all_items
|
||||||
|
|
||||||
estimated_pages = self._estimate_total_pages(
|
estimated_pages = self._estimate_total_pages(library_response, page_size)
|
||||||
library_response, page_size)
|
|
||||||
page_results = self._fetch_remaining_pages(
|
page_results = self._fetch_remaining_pages(
|
||||||
response_groups=response_groups,
|
response_groups=response_groups,
|
||||||
page_size=page_size,
|
page_size=page_size,
|
||||||
|
|||||||
34
tests/app/test_app_table_row_keys.py
Normal file
34
tests/app/test_app_table_row_keys.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from auditui.app.table import AppTableMixin
|
||||||
|
|
||||||
|
|
||||||
|
class DummyTableApp(AppTableMixin):
    """Minimal mixin host supplying just the library client the row-key helper needs."""

    def __init__(self) -> None:
        """Attach a stub library client whose ASIN extraction reads ``item['asin']``."""
        # Build the stub class dynamically; its extract_asin mirrors the
        # real client's signature (self, item).
        stub_client_cls = type(
            "Library",
            (),
            {"extract_asin": lambda self, item: item.get("asin")},
        )
        self.library_client = stub_client_cls()
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_row_key_prefers_asin_and_remains_unique() -> None:
    """Verify repeated ASINs get deterministic ``#<n>`` suffixes."""
    app = DummyTableApp()
    seen: set[str] = set()
    duplicate_item = {"asin": "ASIN1"}
    keys = [
        app._build_row_key(duplicate_item, "Title", position, seen)
        for position in (0, 1)
    ]
    # First occurrence keeps the bare ASIN; the duplicate is suffixed.
    assert keys == ["ASIN1", "ASIN1#2"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_row_key_falls_back_to_title_and_index() -> None:
    """Check that a missing ASIN yields a title-plus-index key."""
    app = DummyTableApp()
    seen: set[str] = set()
    fallback_key = app._build_row_key({"asin": None}, "Unknown Title", 3, seen)
    assert fallback_key == "Unknown Title#3"
|
||||||
Reference in New Issue
Block a user