diff --git a/containers/ollama/build_container.sh b/containers/ollama/build_container.sh index e5815ee0..53db8a66 100755 --- a/containers/ollama/build_container.sh +++ b/containers/ollama/build_container.sh @@ -4,7 +4,7 @@ # TODO: MP should pull a CUDA enabled version instead? # specify version -VERSION="0.13.2" +VERSION="0.17.7" TAG=${VERSION} IMAGE_NAME="ollama" MODULE_FOLDER="testing/ollama" diff --git a/modules/m3/applications/testing/ollama/0.13.2.lua b/modules/m3/applications/testing/ollama/0.13.2.lua new file mode 100644 index 00000000..b10b0acc --- /dev/null +++ b/modules/m3/applications/testing/ollama/0.13.2.lua @@ -0,0 +1,22 @@ + +help([[ +Name: Ollama +Version: 0.13.2 +Website: https://ollama.com/ + +Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine + +]]) +whatis("Name: Ollama") +whatis("Version: ollama:0.13.2") +whatis("Category: Ollama") +whatis("URL: https://hub.docker.com/r/ollama/ollama") +whatis("Description: Provides access to Ollama through a container built with Apptainer") +family("Ollama") + +always_load('apptainer') +local sif_file = '/hpc/m3/containers/ollama/ollama_0.13.2.sif' + +setenv('CONTAINER_IMAGE', sif_file) +source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh") + diff --git a/modules/m3/applications/testing/ollama/0.15.1.lua b/modules/m3/applications/testing/ollama/0.15.1.lua new file mode 100644 index 00000000..9782b81e --- /dev/null +++ b/modules/m3/applications/testing/ollama/0.15.1.lua @@ -0,0 +1,22 @@ + +help([[ +Name: Ollama +Version: 0.15.1 +Website: https://ollama.com/ + +Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine + +]]) +whatis("Name: Ollama") +whatis("Version: ollama:0.15.1") +whatis("Category: Ollama") +whatis("URL: https://hub.docker.com/r/ollama/ollama") +whatis("Description: Provides access to Ollama through a container 
built with Apptainer") +family("Ollama") + +always_load('apptainer') +local sif_file = '/hpc/m3/containers/ollama/ollama_0.15.1.sif' + +setenv('CONTAINER_IMAGE', sif_file) +source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh") + diff --git a/modules/m3/applications/testing/ollama/0.17.7.lua b/modules/m3/applications/testing/ollama/0.17.7.lua new file mode 100644 index 00000000..69334dad --- /dev/null +++ b/modules/m3/applications/testing/ollama/0.17.7.lua @@ -0,0 +1,22 @@ + +help([[ +Name: Ollama +Version: 0.17.7 +Website: https://ollama.com/ + +Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine + +]]) +whatis("Name: Ollama") +whatis("Version: ollama:0.17.7") +whatis("Category: Ollama") +whatis("URL: https://hub.docker.com/r/ollama/ollama") +whatis("Description: Provides access to Ollama through a container built with Apptainer") +family("Ollama") + +always_load('apptainer') +local sif_file = '/hpc/m3/containers/ollama/ollama_0.17.7.sif' + +setenv('CONTAINER_IMAGE', sif_file) +source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh") + diff --git a/modules/mp/apps/testing/ollama/0.13.2.lua b/modules/mp/apps/testing/ollama/0.13.2.lua new file mode 100644 index 00000000..b10b0acc --- /dev/null +++ b/modules/mp/apps/testing/ollama/0.13.2.lua @@ -0,0 +1,22 @@ + +help([[ +Name: Ollama +Version: 0.13.2 +Website: https://ollama.com/ + +Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine + +]]) +whatis("Name: Ollama") +whatis("Version: ollama:0.13.2") +whatis("Category: Ollama") +whatis("URL: https://hub.docker.com/r/ollama/ollama") +whatis("Description: Provides access to Ollama through a container built with Apptainer") +family("Ollama") + +always_load('apptainer') +local sif_file = '/hpc/m3/containers/ollama/ollama_0.13.2.sif' + +setenv('CONTAINER_IMAGE', sif_file) 
+source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh") + diff --git a/modules/mp/apps/testing/ollama/0.15.1.lua b/modules/mp/apps/testing/ollama/0.15.1.lua new file mode 100644 index 00000000..9782b81e --- /dev/null +++ b/modules/mp/apps/testing/ollama/0.15.1.lua @@ -0,0 +1,22 @@ + +help([[ +Name: Ollama +Version: 0.15.1 +Website: https://ollama.com/ + +Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine + +]]) +whatis("Name: Ollama") +whatis("Version: ollama:0.15.1") +whatis("Category: Ollama") +whatis("URL: https://hub.docker.com/r/ollama/ollama") +whatis("Description: Provides access to Ollama through a container built with Apptainer") +family("Ollama") + +always_load('apptainer') +local sif_file = '/hpc/m3/containers/ollama/ollama_0.15.1.sif' + +setenv('CONTAINER_IMAGE', sif_file) +source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh") + diff --git a/modules/mp/apps/testing/ollama/0.17.7.lua b/modules/mp/apps/testing/ollama/0.17.7.lua new file mode 100644 index 00000000..69334dad --- /dev/null +++ b/modules/mp/apps/testing/ollama/0.17.7.lua @@ -0,0 +1,22 @@ + +help([[ +Name: Ollama +Version: 0.17.7 +Website: https://ollama.com/ + +Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine + +]]) +whatis("Name: Ollama") +whatis("Version: ollama:0.17.7") +whatis("Category: Ollama") +whatis("URL: https://hub.docker.com/r/ollama/ollama") +whatis("Description: Provides access to Ollama through a container built with Apptainer") +family("Ollama") + +always_load('apptainer') +local sif_file = '/hpc/m3/containers/ollama/ollama_0.17.7.sif' + +setenv('CONTAINER_IMAGE', sif_file) +source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh") +