diff --git a/README.md b/README.md
index b84821b..a78a45d 100644
--- a/README.md
+++ b/README.md
@@ -181,6 +181,14 @@ bob.initialize(
model='Phi-3.5-mini-instruct')
```
+Alternatively, you can specify the endpoint URL directly. In that case, pass the API key explicitly as well:
+```
+import os
+
+bob.initialize(
+    endpoint='https://models.inference.ai.azure.com',
+    api_key=os.environ.get('AZURE_API_KEY'),
+    model='Phi-3.5-mini-instruct')
+```
+
+
### Using custom endpoints
Custom endpoints can be used as well if they support the OpenAI API. Examples are [blablador](https://login.helmholtz.de/oauth2-as/oauth2-authz-web-entry) and [ollama](https://ollama.com/).
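+
+For example, a local [ollama](https://ollama.com/) server exposes an OpenAI-compatible API at `http://localhost:11434/v1` by default. A minimal sketch, assuming such a server is running and a model has been pulled locally (the model name and API key below are placeholders; adjust them to your setup):
+```
+bob.initialize(
+    endpoint='http://localhost:11434/v1',
+    api_key='none',  # local servers typically ignore the key, but a value may still be required
+    model='llama3')
+```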
diff --git a/demo/azure.ipynb b/demo/azure.ipynb
new file mode 100644
index 0000000..a3eef08
--- /dev/null
+++ b/demo/azure.ipynb
@@ -0,0 +1,176 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "b95f8f8a-c13f-4002-a969-12e33bc7797a",
+ "metadata": {},
+ "source": [
+ "# Azure\n",
+ "You can also access models on [Microsoft Azure](https://azure.microsoft.com/).\n",
+ "You need to sign up at the platform and store API key to your environment as `AZURE_API_KEY`.\n",
+ "You can then access models like shown below."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "d5779abe-11fb-4b7e-928e-061a9a6b23cb",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from bia_bob import bob"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "a2f361fa-ab46-458b-aebe-9be46cc66c76",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "
\n",
+ " This notebook may contain text, code and images generated by artificial intelligence.\n",
+ " Used model: meta-llama-3.1-405b-instruct, vision model: None, endpoint: https://models.inference.ai.azure.com, bia-bob version: 0.23.0.. Do not enter sensitive or private information and verify generated contents according to good scientific practice. Read more:
https://github.com/haesleinhuepf/bia-bob#disclaimer\n",
+ "
\n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "bob.initialize(endpoint=\"azure\", model=\"meta-llama-3.1-405b-instruct\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "6b3e4eb3-571a-4c1e-b1dc-bb3c2ea32523",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%bob Please load blobs.tif and print out its dimensions."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e6cb3b4a-33cd-44f3-afb4-5a6d4d667ccf",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import skimage\n",
+ "\n",
+ "# Load the image\n",
+ "image = skimage.io.imread(\"blobs.tif\")\n",
+ "\n",
+ "# Print out the dimensions of the image\n",
+ "print(image.shape)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4707a5d0-8ed0-40b7-b1aa-ef9937929c01",
+ "metadata": {},
+ "source": [
+ "Alternatively, you can also specify the endpoint yourself:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "f76239eb-c3b7-4b69-8eaf-bdc6fb198c16",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " This notebook may contain text, code and images generated by artificial intelligence.\n",
+ " Used model: Phi-3-medium-128k-instruct, vision model: None, endpoint: https://models.inference.ai.azure.com, bia-bob version: 0.23.0.. Do not enter sensitive or private information and verify generated contents according to good scientific practice. Read more:
https://github.com/haesleinhuepf/bia-bob#disclaimer\n",
+ "
\n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import os\n",
+ "\n",
+ "bob.initialize(endpoint=\"https://models.inference.ai.azure.com\", \n",
+ " api_key=os.environ.get('AZURE_API_KEY'),\n",
+ " model=\"Phi-3-medium-128k-instruct\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "48c7250a-3b22-4d3e-b0f8-df498db1e8ce",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%bob Please load blobs.tif and print out its dimensions."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "527de356-1250-42ce-abfc-a1b5385d7f06",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from PIL import Image\n",
+ "\n",
+ "# Load the TIFF image\n",
+ "with Image.open('blobs.tif') as img:\n",
+ " # Get the dimensions\n",
+ " width, height = img.size\n",
+ " \n",
+ " # Print the dimensions\n",
+ " print(f\"Image dimensions: {width} x {height}\")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.9"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}