I am trying to run a demo application that uses popular LLMs through their LangChain integrations (`langchain_fireworks`, `langchain_google_genai`) on Python 3.12 or 3.13.
If I only include `langchain_fireworks`, then I get a warning:
/usr/local/lib/python3.12/site-packages/google/protobuf/runtime_version.py:98: UserWarning: Protobuf gencode version 5.29.0 is exactly one major version older than the runtime version 6.31.1 at gateway/dataset.proto. Please update the gencode to avoid compatibility violations in the next runtime release.
And when the application tries to use `ChatFireworks`, it throws this error:
Traceback (most recent call last):
  File "/usr/local/lib/python3.13/site-packages/gradio/queueing.py", line 626, in process_events
    response = await route_utils.call_process_api(
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ...<5 lines>...
    )
    ^
  File "/usr/local/lib/python3.13/site-packages/gradio/route_utils.py", line 350, in call_process_api
    output = await app.get_blocks().process_api(
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ...<11 lines>...
    )
    ^
  File "/usr/local/lib/python3.13/site-packages/gradio/blocks.py", line 2239, in process_api
    result = await self.call_function(
             ^^^^^^^^^^^^^^^^^^^^^^^^^
    ...<8 lines>...
    )
    ^
  File "/usr/local/lib/python3.13/site-packages/gradio/blocks.py", line 1746, in call_function
    prediction = await anyio.to_thread.run_sync(  # type: ignore
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        fn, *processed_input, limiter=self.limiter
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/usr/local/lib/python3.13/site-packages/anyio/to_thread.py", line 56, in run_sync
    return await get_async_backend().run_sync_in_worker_thread(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/usr/local/lib/python3.13/site-packages/anyio/_backends/_asyncio.py", line 2476, in run_sync_in_worker_thread
    return await future
           ^^^^^^^^^^^^
  File "/usr/local/lib/python3.13/site-packages/anyio/_backends/_asyncio.py", line 967, in run
    result = context.run(func, *args)
  File "/usr/local/lib/python3.13/site-packages/gradio/utils.py", line 917, in wrapper
    response = f(*args, **kwargs)
  File "/app/app.py", line 508, in exec_prompt_wrapper
    return next(exec_prompt(
        chat_history=chat_history,
        ...<6 lines>...
        streaming=False,
    ))
  File "/app/app.py", line 401, in exec_prompt
    llm = get_llm(
        model_name=model_name_kind,
        temperature=temperature,
        max_new_tokens=max_tokens,
    )
  File "/app/app.py", line 147, in get_llm
    return ChatFireworks(
        name="accounts/fireworks/models/llama-v3p1-405b-instruct",
        max_tokens=max_new_tokens,
        temperature=temperature,
    )
  File "/usr/local/lib/python3.13/site-packages/langchain_core/load/serializable.py", line 130, in __init__
    super().__init__(*args, **kwargs)
    ~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^
  File "/usr/local/lib/python3.13/site-packages/pydantic/main.py", line 253, in __init__
    validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
pydantic_core._pydantic_core.ValidationError: 1 validation error for ChatFireworks
model
  Field required [type=missing, input_value={'name': 'accounts/firewo...: 1, 'model_kwargs': {}}, input_type=dict]
    For further information visit https://errors.pydantic.dev/2.11/v/missing
If I include both `langchain_fireworks` and `langchain_google_genai`, then I get an error at the second import (no matter the order):
Traceback (most recent call last):
File "/app/app.py", line 51, in <module>
from langchain_fireworks import ChatFireworks
File "/usr/local/lib/python3.12/site-packages/langchain_fireworks/__init__.py", line 1, in <module>
from langchain_fireworks.chat_models import ChatFireworks
File "/usr/local/lib/python3.12/site-packages/langchain_fireworks/chat_models.py", line 19, in <module>
from fireworks.client import AsyncFireworks, Fireworks # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/site-packages/fireworks/__init__.py", line 1, in <module>
from .llm import LLM
File "/usr/local/lib/python3.12/site-packages/fireworks/llm/__init__.py", line 1, in <module>
from .LLM import LLM
File "/usr/local/lib/python3.12/site-packages/fireworks/llm/LLM.py", line 25, in <module>
from fireworks.dataset import Dataset
File "/usr/local/lib/python3.12/site-packages/fireworks/dataset/__init__.py", line 1, in <module>
from .Dataset import Dataset
File "/usr/local/lib/python3.12/site-packages/fireworks/dataset/Dataset.py", line 22, in <module>
from fireworks.gateway import Gateway
File "/usr/local/lib/python3.12/site-packages/fireworks/gateway.py", line 15, in <module>
from fireworks.control_plane.generated.protos_grpcio.gateway.gateway_pb2_grpc import GatewayStub as SyncGatewayStub
File "/usr/local/lib/python3.12/site-packages/fireworks/control_plane/generated/protos_grpcio/gateway/gateway_pb2_grpc.py", line 41, in <module>
from ..google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
File "/usr/local/lib/python3.12/site-packages/fireworks/control_plane/generated/protos_grpcio/google/longrunning/operations_pb2.py", line 29, in <module>
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#google/longrunning/operations.proto\x12\x12google.longrunning\x1a\x19google/protobuf/any.proto\x1a\x17google/rpc/status.proto\"\xa8\x01\n\tOperation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x08metadata\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\x12#\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12(\n\x08response\x18\x05 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x42\x08\n\x06result\"#\n\x13GetOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\tBEZCcloud.google.com/go/longrunning/autogen/longrunningpb;longrunningpbb\x06proto3')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: Couldn't build proto file into descriptor pool: duplicate file name google/longrunning/operations.proto
How can I work around this?
my environment:
Python 3.13.5
root@d4eb6277af52:/app# pip list
Package Version
aiofiles 24.1.0
aiohappyeyeballs 2.6.1
aiohttp 3.12.15
aiosignal 1.4.0
annotated-types 0.7.0
anthropic 0.60.0
anyio 4.10.0
asttokens 3.0.0
asyncstdlib-fw 3.13.2
attrs 25.3.0
audioop-lts 0.2.1
betterproto-fw 2.0.3
Brotli 1.1.0
cachetools 5.5.2
certifi 2025.8.3
charset-normalizer 3.4.2
click 8.2.1
cohere 5.16.2
comm 0.2.3
dataclasses-json 0.6.7
debugpy 1.8.15
decorator 5.2.1
distro 1.9.0
executing 2.2.0
fastapi 0.116.1
fastavro 1.12.0
ffmpy 0.6.1
filelock 3.18.0
filetype 1.2.0
fireworks-ai 0.17.5
frozenlist 1.7.0
fsspec 2025.7.0
google-ai-generativelanguage 0.6.18
google-api-core 2.25.1
google-auth 2.40.3
googleapis-common-protos 1.70.0
gradio 5.39.0
gradio_client 1.11.0
greenlet 3.2.3
groovy 0.1.2
grpcio 1.74.0
grpcio-status 1.74.0
grpclib 0.4.8
h11 0.16.0
h2 4.2.0
hf-xet 1.1.5
hpack 4.1.0
httpcore 1.0.9
httpx 0.28.1
httpx-sse 0.4.0
httpx-ws 0.7.2
huggingface-hub 0.34.3
hyperframe 6.1.0
idna 3.10
ipykernel 6.30.0
ipython 9.4.0
ipython_pygments_lexers 1.1.1
ipywidgets 8.1.7
jedi 0.19.2
Jinja2 3.1.6
jiter 0.10.0
jsonpatch 1.33
jsonpointer 3.0.0
jupyter_client 8.6.3
jupyter_core 5.8.1
jupyterlab_widgets 3.0.15
langchain 0.3.27
langchain-anthropic 0.3.18
langchain-cohere 0.4.5
langchain-community 0.3.27
langchain-core 0.3.72
langchain-fireworks 0.3.0
langchain-google-genai 2.1.8
langchain-mistralai 0.2.11
langchain-openai 0.3.28
langchain-text-splitters 0.3.9
langsmith 0.4.10
markdown-it-py 3.0.0
MarkupSafe 3.0.2
marshmallow 3.26.1
matplotlib-inline 0.1.7
mdurl 0.1.2
mmh3 5.2.0
multidict 6.6.3
mypy_extensions 1.1.0
nest-asyncio 1.6.0
numpy 2.3.2
openai 1.98.0
orjson 3.11.1
packaging 25.0
pandas 2.3.1
parso 0.8.4
pexpect 4.9.0
pillow 11.3.0
pip 25.1.1
platformdirs 4.3.8
prompt_toolkit 3.0.51
propcache 0.3.2
proto-plus 1.26.1
protobuf 6.31.1
psutil 7.0.0
ptyprocess 0.7.0
pure_eval 0.2.3
pyasn1 0.6.1
pyasn1_modules 0.4.2
pydantic 2.11.7
pydantic_core 2.33.2
pydantic-settings 2.10.1
pydub 0.25.1
Pygments 2.19.2
python-dateutil 2.9.0.post0
python-dotenv 1.1.1
python-multipart 0.0.20
pytz 2025.2
PyYAML 6.0.2
pyzmq 27.0.1
regex 2025.7.34
requests 2.32.4
requests-toolbelt 1.0.0
rich 14.1.0
rsa 4.9.1
ruff 0.9.10
safehttpx 0.1.6
semantic-version 2.10.0
shellingham 1.5.4
six 1.17.0
sniffio 1.3.1
SQLAlchemy 2.0.42
stack-data 0.6.3
starlette 0.47.2
tenacity 9.1.2
tiktoken 0.9.0
tokenizers 0.21.4
tomlkit 0.13.3
tornado 6.5.1
tqdm 4.67.1
traitlets 5.14.3
typer 0.16.0
types-PyYAML 6.0.12.20250516
types-requests 2.32.4.20250611
typing_extensions 4.14.1
typing-inspect 0.9.0
typing-inspection 0.4.1
tzdata 2025.2
urllib3 2.5.0
uvicorn 0.35.0
wcwidth 0.2.13
websockets 15.0.1
widgetsnbextension 4.0.14
wsproto 1.2.0
yarl 1.20.1
zstandard 0.23.0
Thank you for any suggestion.
Laszlo