Is there any adjustment that OpenAIEmbeddings makes when calling an OpenAI-compatible embeddings endpoint that could cause the returned vector to change? I am serving the Alibaba-NLP gte-multilingual-base model with text-embeddings-inference and calling it in the three ways below.

from langchain_openai import OpenAIEmbeddings

embeddings_api_1 = OpenAIEmbeddings(
    base_url="http://10.0.132.45:9314/v1",  # TEI server exposing an OpenAI-compatible /v1 API
    model="/data",
    api_key="123",
)

from langchain_huggingface.embeddings import HuggingFaceEndpointEmbeddings

embeddings_api_2 = HuggingFaceEndpointEmbeddings(
    model="http://10.0.132.45:9314",  # same TEI server, native endpoint
)
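
Both wrappers expose embed_query, so the vectors I compare can be obtained like this (a minimal sketch; the query text is the same Vietnamese sentence used with the raw client below):

text = "Nếu trong một ngày NLĐ vừa có ký hiệu V vừa có P thì hệ thống sẽ tính lương như thế nào?"
vec_1 = embeddings_api_1.embed_query(text)  # via the OpenAI-compatible route
vec_2 = embeddings_api_2.embed_query(text)  # via the HF endpoint route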

from openai import OpenAI

client = OpenAI(
    base_url="http://10.0.132.45:9314/v1",
    api_key="123",
)

response = client.embeddings.create(
    model="/data",
    # Vietnamese test sentence: "If an employee has both code V and code P in the same day, how will the system calculate the salary?"
    input="Nếu trong một ngày NLĐ vừa có ký hiệu V vừa có P thì hệ thống sẽ tính lương như thế nào?",
    encoding_format="float",
)