chore: change to API_ENDPOINT to conventional name API_BASE_URL

This commit is contained in:
AJ ONeal 2023-10-22 00:38:12 -06:00
parent c307777a6d
commit 859adee369
No known key found for this signature in database
GPG Key ID: 65118FF2A9DB590F
6 changed files with 23 additions and 22 deletions

View File

@ -7,7 +7,7 @@ jobs:
build: build:
name: 'Fmt, Lint, & Build' name: 'Fmt, Lint, & Build'
env: env:
PUBLIC_API_ENDPOINT: '' PUBLIC_API_BASE_URL: ''
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:

View File

@ -3,13 +3,13 @@
FROM node:latest FROM node:latest
WORKDIR /app WORKDIR /app
ARG OLLAMA_API_ENDPOINT='' ARG OLLAMA_API_BASE_URL=''
RUN echo $OLLAMA_API_ENDPOINT RUN echo $OLLAMA_API_BASE_URL
ENV ENV prod ENV ENV prod
ENV PUBLIC_API_ENDPOINT $OLLAMA_API_ENDPOINT ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL
RUN echo $PUBLIC_API_ENDPOINT RUN echo $PUBLIC_API_BASE_URL
COPY package.json package-lock.json ./ COPY package.json package-lock.json ./
RUN npm ci RUN npm ci

View File

@ -24,7 +24,7 @@ ChatGPT-Style Web Interface for Ollama 🦙
- 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature. - 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API endpoint in the Docker image: `docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .`. - 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API base URL in the Docker image: `docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui .`.
- 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features. - 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.
@ -49,7 +49,8 @@ OLLAMA_HOST=0.0.0.0 OLLAMA_ORIGINS=* ollama serve
### Using Docker 🐳 ### Using Docker 🐳
```bash ```bash
docker build -t ollama-webui . docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui .
docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
``` ```
@ -57,10 +58,10 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localh
#### Connecting to Ollama on a Different Server #### Connecting to Ollama on a Different Server
If Ollama is hosted on a server other than your local machine, you can connect to it using the following environment variable: If Ollama is hosted on a server other than your local machine, change `OLLAMA_API_BASE_URL` to match:
```bash ```bash
docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui . docker build --build-arg OLLAMA_API_BASE_URL='https://example.com/api' -t ollama-webui .
docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
``` ```

View File

@ -1,4 +1,4 @@
PUBLIC_API_ENDPOINT="http://localhost:11434/api" PUBLIC_API_BASE_URL="http://localhost:11434/api"
OLLAMA_API_ID='my-api-token' OLLAMA_API_ID='my-api-token'
OLLAMA_API_TOKEN='xxxxxxxxxxxxxxxx' OLLAMA_API_TOKEN='xxxxxxxxxxxxxxxx'

View File

@ -1,12 +1,12 @@
import { browser } from '$app/environment'; import { browser } from '$app/environment';
import { PUBLIC_API_ENDPOINT } from '$env/static/public'; import { PUBLIC_API_BASE_URL } from '$env/static/public';
export const API_ENDPOINT = export const API_BASE_URL =
PUBLIC_API_ENDPOINT === '' PUBLIC_API_BASE_URL === ''
? browser ? browser
? `http://${location.hostname}:11434/api` ? `http://${location.hostname}:11434/api`
: `http://localhost:11434/api` : `http://localhost:11434/api`
: PUBLIC_API_ENDPOINT; : PUBLIC_API_BASE_URL;
// Source: https://kit.svelte.dev/docs/modules#$env-static-public // Source: https://kit.svelte.dev/docs/modules#$env-static-public
// This feature, akin to $env/static/private, exclusively incorporates environment variables // This feature, akin to $env/static/private, exclusively incorporates environment variables
@ -14,6 +14,6 @@ export const API_ENDPOINT =
// Consequently, these variables can be securely exposed to client-side code. // Consequently, these variables can be securely exposed to client-side code.
// Example of the .env configuration: // Example of the .env configuration:
// OLLAMA_API_ENDPOINT="http://localhost:11434/api" // OLLAMA_API_BASE_URL="http://localhost:11434/api"
// # Public // # Public
// PUBLIC_API_ENDPOINT=$OLLAMA_API_ENDPOINT // PUBLIC_API_BASE_URL=$OLLAMA_API_BASE_URL

View File

@ -7,7 +7,7 @@
const { saveAs } = fileSaver; const { saveAs } = fileSaver;
import hljs from 'highlight.js'; import hljs from 'highlight.js';
import 'highlight.js/styles/dark.min.css'; import 'highlight.js/styles/dark.min.css';
import { API_ENDPOINT } from '$lib/constants'; import { API_BASE_URL } from '$lib/constants';
import { onMount, tick } from 'svelte'; import { onMount, tick } from 'svelte';
import Navbar from '$lib/components/layout/Navbar.svelte'; import Navbar from '$lib/components/layout/Navbar.svelte';
@ -31,8 +31,8 @@
let messages = []; let messages = [];
onMount(async () => { onMount(async () => {
console.log(API_ENDPOINT); console.log(API_BASE_URL);
const res = await fetch(`${API_ENDPOINT}/tags`, { const res = await fetch(`${API_BASE_URL}/tags`, {
method: 'GET', method: 'GET',
headers: { headers: {
Accept: 'application/json', Accept: 'application/json',
@ -277,7 +277,7 @@
messages = [...messages, responseMessage]; messages = [...messages, responseMessage];
window.scrollTo({ top: document.body.scrollHeight }); window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(`${API_ENDPOINT}/generate`, { const res = await fetch(`${API_BASE_URL}/generate`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'text/event-stream' 'Content-Type': 'text/event-stream'
@ -373,7 +373,7 @@
messages = [...messages, responseMessage]; messages = [...messages, responseMessage];
window.scrollTo({ top: document.body.scrollHeight }); window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(`${API_ENDPOINT}/generate`, { const res = await fetch(`${API_BASE_URL}/generate`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'text/event-stream' 'Content-Type': 'text/event-stream'
@ -453,7 +453,7 @@
const generateTitle = async (user_prompt) => { const generateTitle = async (user_prompt) => {
console.log('generateTitle'); console.log('generateTitle');
const res = await fetch(`${API_ENDPOINT}/generate`, { const res = await fetch(`${API_BASE_URL}/generate`, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'text/event-stream' 'Content-Type': 'text/event-stream'