Skip to content

Commit 8c71da3

Browse files
authored
fix: Semantic parse should only set higher temperature for google models (#277)
Setting temperature to 1.0 was originally a fix for a Google-specific issue. Addresses comment https://github.com/typedef-ai/fenic/pull/253/files#r2437422061. Note that we still set temperature to 1.0 for OpenRouter Google models even when the Profile is configured to use OpenRouter's parse engine, which is not ideal.
1 parent 5e4e861 commit 8c71da3

File tree

1 file changed

+6
-1
lines changed

1 file changed

+6
-1
lines changed

src/fenic/_backends/local/semantic_operators/parse_pdf.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from fenic._backends.local.utils.doc_loader import DocFolderLoader
1414
from fenic._inference.language_model import InferenceConfiguration, LanguageModel
1515
from fenic._inference.types import LMRequestFile, LMRequestMessages
16+
from fenic.core._inference.model_catalog import ModelProvider
1617
from fenic.core._logical_plan.resolved_types import ResolvedModelAlias
1718

1819
logger = logging.getLogger(__name__)
@@ -60,15 +61,19 @@ def __init__(
6061

6162
DocFolderLoader.check_file_extensions(input.to_list(), "pdf")
6263

64+
temperature = 0.0
65+
if model.provider == ModelProvider.GOOGLE_DEVELOPER or model.provider == ModelProvider.GOOGLE_VERTEX or (model.provider == ModelProvider.OPENROUTER and model.model.split("/")[0] == "google"):
66+
temperature = 1.0 # Use a higher temperature so gemini flash models can handle complex table formatting. For more info see the conversation here: https://discuss.ai.google.dev/t/gemini-2-0-flash-has-a-weird-bug/65119/26
67+
6368
super().__init__(
6469
input=input,
6570
request_sender=CompletionOnlyRequestSender(
6671
model=model,
6772
operator_name="semantic.parse_pdf",
6873
inference_config=InferenceConfiguration(
6974
max_output_tokens=max_output_tokens,
70-
temperature=1.0, # Use a higher temperature so gemini flash models can handle complex table formatting. For more info see the conversation here: https://discuss.ai.google.dev/t/gemini-2-0-flash-has-a-weird-bug/65119/26
7175
model_profile=model_alias.profile if model_alias else None,
76+
temperature=temperature,
7277
request_timeout=request_timeout,
7378
),
7479
),

0 commit comments

Comments
 (0)