Skip to content

Commit 0261d2f

Browse files
authored
♻️ minor fixes and tweaks to options and outputs (#336)
1 parent c334a1f commit 0261d2f

File tree

5 files changed

+12
-13
lines changed

5 files changed

+12
-13
lines changed

mindee/client_mixin.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,14 @@
66

77

88
class ClientMixin:
9-
"""Mixin for client Client V1 & V2 common static methods."""
9+
"""Mixin for clients V1 & V2 common static methods."""
1010

1111
@staticmethod
1212
def source_from_path(
1313
input_path: Union[Path, str], fix_pdf: bool = False
1414
) -> PathInput:
1515
"""
16-
Load a document from an absolute path, as a string.
16+
Load a document from a path, as a string or a `Path` object.
1717
1818
:param input_path: Path of file to open
1919
:param fix_pdf: Whether to attempt fixing PDF files before sending.

mindee/client_v2.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,13 @@ def enqueue_and_parse(
121121
if not isinstance(poll_results, JobResponse):
122122
break
123123
if poll_results.job.status == "Failed":
124-
raise MindeeError(f"Parsing failed for job {poll_results.job.id}")
124+
if poll_results.job.error:
125+
detail = poll_results.job.error.detail
126+
else:
127+
detail = "No error detail available."
128+
raise MindeeError(
129+
f"Parsing failed for job {poll_results.job.id}: {detail}"
130+
)
125131
logger.debug(
126132
"Polling server for parsing result with job id: %s",
127133
queue_result.job.id,

mindee/input/inference_predict_options.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,14 +10,9 @@ class InferencePredictOptions:
1010
"""Inference prediction options."""
1111

1212
model_id: str
13-
"""ID of the model."""
14-
full_text: bool = False
15-
"""
16-
Whether to include the full text data for async APIs.
17-
This performs a full OCR operation on the server and will increase response time and payload size.
18-
"""
13+
"""ID of the model, required."""
1914
rag: bool = False
20-
"""If set, will enable Retrieval-Augmented Generation."""
15+
"""If set to `True`, will enable Retrieval-Augmented Generation."""
2116
alias: Optional[str] = None
2217
"""Optional alias for the file."""
2318
webhook_ids: Optional[List[str]] = None

mindee/mindee_http/mindee_api_v2.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -80,8 +80,6 @@ def predict_async_req_post(
8080
data = {"model_id": options.model_id}
8181
url = f"{self.url_root}/inferences/enqueue"
8282

83-
if options.full_text:
84-
data["full_text_ocr"] = "true"
8583
if options.rag:
8684
data["rag"] = "true"
8785
if options.webhook_ids and len(options.webhook_ids) > 0:

mindee/parsing/v2/inference.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def __str__(self) -> str:
2929
f"Inference\n"
3030
f"#########\n"
3131
f":Model: {self.model.id}\n"
32-
f":File: {self.file}\n"
32+
f":File:\n"
3333
f" :Name: {self.file.name}\n"
3434
f" :Alias: {self.file.alias}\n\n"
3535
f"Result\n"

0 commit comments

Comments (0)