11 changes: 8 additions & 3 deletions src/palace/manager/celery/opds.py
@@ -57,7 +57,9 @@ def opds_import_task[FeedType](
     )

     if not import_result:
-        task.log.info("Import failed, aborting task.")
+        task.log.info(
+            f"Import failed, aborting task for collection '{collection.name}' (id={collection.id})"
+        )
         return None

     # If a post-import hook is provided, call it with the import result.
@@ -81,8 +83,11 @@ def opds_import_task[FeedType](

     if not should_continue:
         task.log.info(
-            f"Found unchanged publications in feed, stopping import without harvesting the rest of the feed."
+            f"Found unchanged publications in feed, stopping import without harvesting the rest of the feed"
+            f" for collection '{collection.name}' (id={collection.id}) "
         )

-    task.log.info("Import complete.")
+    task.log.info(
+        f"Import complete for collection '{collection.name}' (id={collection.id})"
+    )
     return identifier_set
8 changes: 6 additions & 2 deletions src/palace/manager/celery/tasks/opds_odl.py
@@ -345,7 +345,9 @@ def import_collection(
     )

     if not import_result:
-        task.log.info("Import failed, aborting task.")
+        task.log.info(
+            f"Import failed, aborting task for collection '{collection.name}' (id={collection_id})."
+        )
         return

     next_link = import_result.next_url
@@ -360,4 +362,6 @@ def import_collection(
             )
         )

-    task.log.info("Import complete.")
+    task.log.info(
+        f"Import complete for collection '{collection.name}' (id={collection_id})."
+    )
9 changes: 6 additions & 3 deletions src/palace/manager/integration/license/opds/importer.py
@@ -358,7 +358,8 @@ def import_feed(
             feed = self._fetch_feed(feed_url)
         except ValueError as e:
             self.log.error(
-                f"Failed to fetch or parse the feed from '{feed_url}': {e}",
+                f"Failed to fetch or parse the feed from '{feed_url}' "
+                f"for collection '{collection.name}' (id={collection.id}): {e}",
                 exc_info=e,
             )
             return False
@@ -405,11 +406,13 @@ def import_feed(

         if failures:
             self.log.error(
-                f"Failed to import {len(failures)} publications from '{feed_url}'."
+                f"Failed to import {len(failures)} publications from '{feed_url}' "
+                f"for collection '{collection.name}' (id={collection.id})."
             )
             for failure in failures:
                 self.log.error(
-                    f"Failed to import publication: {failure.identifier} ({failure.title})"
+                    f"Failed to import publication: {failure.identifier} ({failure.title}) "
+                    f"for collection '{collection.name}' (id={collection.id})"
                     f" - {failure.error_message}: {failure.error}",
                     exc_info=failure.error,
                     extra={"palace_publication_data": failure.publication_data},
5 changes: 4 additions & 1 deletion tests/manager/celery/tasks/test_opds1.py
@@ -579,6 +579,9 @@ def test_parse_identifier(
         opds_files_fixture: OPDSFilesFixture,
         caplog: pytest.LogCaptureFixture,
     ) -> None:
+
+        collection = opds1_import_fixture.collection
+
         # Normal case, we just call out to Identifier.parse_urn
         expected_identifier = IdentifierData(
             type=Identifier.URI, identifier="https://example.com/12345"
@@ -607,7 +610,7 @@
         )
         opds1_import_fixture.run_import_task(apply=True)
         assert (
-            "https://unglue.it/api/id/work/7775/ (Warbreaker) - "
+            f"https://unglue.it/api/id/work/7775/ (Warbreaker) for collection '{collection.name}' (id={collection.id}) - "
             "Could not extract an identifier from the publication: My god, it's full of stars"
             in caplog.text
         )
6 changes: 5 additions & 1 deletion tests/manager/celery/tasks/test_opds2.py
@@ -903,6 +903,7 @@ def test_import_multiple_pages(
         3. Re-import with force continues through all pages despite unchanged publications
         """
         caplog.set_level(LogLevel.info)
+        collection = opds2_import_fixture.collection

         # First import: Import both pages successfully
         # feed2 has a next link to feed, which has no next link.
@@ -949,7 +950,10 @@
         # Should NOT see the log message about stopping due to unchanged publications
         assert "Found unchanged publications in feed" not in caplog.text
         # Should see the log message about completing the import
-        assert "Import complete." in caplog.text
+        assert (
+            f"Import complete for collection '{collection.name}' (id={collection.id})"
+            in caplog.text
+        )
         # Should have 6 tasks queued (3 from each page) even though publications are unchanged
         assert len(apply_task_fixture.apply_queue) == 6

3 changes: 2 additions & 1 deletion tests/manager/celery/tasks/test_opds_odl.py
@@ -867,7 +867,8 @@ def test_import(

         # 4. Make sure that the failure is covered
         assert (
-            "Failed to import publication: urn:isbn:9781234567897 (None) - Error validating publication: 2 validation errors"
+            f"Failed to import publication: urn:isbn:9781234567897 (None) for collection '{collection.name}' "
+            f"(id={collection.id}) - Error validating publication: 2 validation errors"
             in caplog.text
         )

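As an aside on the change itself: the collection context string is now repeated inline in each log call. A small helper could keep that format in one place. The sketch below is purely illustrative, assuming a hypothetical collection_context helper and a stand-in Collection dataclass; neither is part of this PR or of palace-manager's API.

import logging
from dataclasses import dataclass

log = logging.getLogger(__name__)


@dataclass
class Collection:
    # Hypothetical stand-in for the real Collection model, for this example only.
    id: int
    name: str


def collection_context(collection: Collection) -> str:
    # Hypothetical helper: builds the "for collection '<name>' (id=<id>)" suffix
    # that the log messages in this change spell out inline.
    return f"for collection '{collection.name}' (id={collection.id})"


# Example usage mirroring the "Import complete" message:
collection = Collection(id=42, name="Example OPDS Collection")
log.info(f"Import complete {collection_context(collection)}")

Keeping the suffix in one helper would make the format easier to change later, at the cost of a small indirection; the inline f-strings used in this PR are arguably clearer at each call site.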