Mirror of https://github.com/OpenHands/OpenHands.git, synced 2025-12-26 05:48:36 +08:00.
chore(deps-dev): bump llama-index from 0.12.12 to 0.12.13 in the llama group (#6448)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
parent 45a048f9e3
commit 19a4f1c3ec
poetry.lock (generated): 17 lines changed
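This is a lock-only refresh: poetry.lock is the only file touched, so the constraints in pyproject.toml already admit llama-index 0.12.13. A minimal sketch of how a maintainer could reproduce roughly the same change locally, assuming Poetry >= 1.8 is installed (the exact commands Dependabot runs are not shown on this page):

    poetry update llama-index llama-index-core   # re-resolve only the bumped packages within existing constraints
    git diff poetry.lock                         # verify that only the expected versions and hashes changed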
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
 
 [[package]]
 name = "aiohappyeyeballs"
@@ -3944,19 +3944,19 @@ pydantic = ">=1.10"
 
 [[package]]
 name = "llama-index"
-version = "0.12.12"
+version = "0.12.13"
 description = "Interface between LLMs and your data"
 optional = false
 python-versions = "<4.0,>=3.9"
 files = [
-    {file = "llama_index-0.12.12-py3-none-any.whl", hash = "sha256:208f77dba5fd8268cacd3d56ec3ee33b0001d5b6ec623c5b91c755af7b08cfae"},
-    {file = "llama_index-0.12.12.tar.gz", hash = "sha256:d4e475726e342b1178736ae3ed93336fe114605e86431b6dfcb454a9e1f26e72"},
+    {file = "llama_index-0.12.13-py3-none-any.whl", hash = "sha256:0b285aa451ced6bd8da40df99068ac96badf8b5725c4edc29f2bce4da2ffd8bc"},
+    {file = "llama_index-0.12.13.tar.gz", hash = "sha256:1e39a397dcc51dabe280c121fd8d5451a6a84595233a8b26caa54d9b7ecf9ffc"},
 ]
 
 [package.dependencies]
 llama-index-agent-openai = ">=0.4.0,<0.5.0"
 llama-index-cli = ">=0.4.0,<0.5.0"
-llama-index-core = ">=0.12.12,<0.13.0"
+llama-index-core = ">=0.12.13,<0.13.0"
 llama-index-embeddings-openai = ">=0.3.0,<0.4.0"
 llama-index-indices-managed-llama-cloud = ">=0.4.0"
 llama-index-llms-openai = ">=0.3.0,<0.4.0"
@@ -4001,13 +4001,13 @@ llama-index-llms-openai = ">=0.3.0,<0.4.0"
 
 [[package]]
 name = "llama-index-core"
-version = "0.12.12"
+version = "0.12.13"
 description = "Interface between LLMs and your data"
 optional = false
 python-versions = "<4.0,>=3.9"
 files = [
-    {file = "llama_index_core-0.12.12-py3-none-any.whl", hash = "sha256:cea491e87f65e6b775b5aef95720de302b85af1bdc67d779c4b09170a30e5b98"},
-    {file = "llama_index_core-0.12.12.tar.gz", hash = "sha256:068b755bbc681731336e822f5977d7608585e8f759c6293ebd812e2659316a37"},
+    {file = "llama_index_core-0.12.13-py3-none-any.whl", hash = "sha256:9708bb594bbddffd6ff0767242e49d8978d1ba60a2e62e071d9d123ad2f17e6f"},
+    {file = "llama_index_core-0.12.13.tar.gz", hash = "sha256:77af0161246ce1de38efc17cb6438dfff9e9558af00bcfac7dd4d0b7325efa4b"},
 ]
 
 [package.dependencies]
@@ -5469,6 +5469,7 @@ description = "Nvidia JIT LTO Library"
 optional = false
 python-versions = ">=3"
 files = [
+    {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"},
     {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"},
     {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"},
 ]