
Building a local large language model with Ollama and calling it from applications


1、Download Ollama

1) Go to the official website (https://ollama.com) and click Download.
2) Run the downloaded installer; no extra configuration is needed.

2. Launching and configuring a model

By default, just open a cmd window and type:

ollama run llama3

to launch the Llama 3 model, or launch the Qwen2 model with:

ollama run qwen2

Once the model is running, simply type your questions at the prompt.
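
ollama run pulls the model automatically on first use. You can also download and inspect models explicitly with the standard Ollama CLI commands:

ollama pull qwen2:0.5b   # download a model without starting a chat
ollama list              # show the models installed locally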

 

3、Configure the UI interface

Install Docker, then deploy the Open WebUI web interface:

docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main

The image is fairly large, so the pull takes some time.
Once the container is running, open http://localhost:3000 in a browser.
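
If the web UI cannot reach the Ollama service running on the host, a variant of the same command that passes the Ollama address explicitly may help. This is a sketch: OLLAMA_BASE_URL is Open WebUI's environment variable for the Ollama endpoint, so verify it against the Open WebUI documentation for your version.

docker run -d -p 3000:8080 \
  --add-host=host.docker.internal:host-gateway \
  -e OLLAMA_BASE_URL=http://host.docker.internal:11434 \
  -v open-webui:/app/backend/data \
  --name open-webui --restart always \
  ghcr.io/open-webui/open-webui:main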

 

4. Build a local knowledge base

AnythingLLM can be used to build a local knowledge base; it supports the local Ollama service as its LLM backend.

5. Configuration

Open port 11434 so the API can be reached from outside the machine, and set OLLAMA_ORIGINS=* if the API will be called from other origins (cross-domain requests).

Windows version

Configure it directly in the system environment variables:

OLLAMA_HOST is the variable name and "0.0.0.0:11434" is the variable value.

OLLAMA_HOST="0.0.0.0:11434"

macOS version

Configure OLLAMA_HOST:

sudo sh -c 'echo "export OLLAMA_HOST=0.0.0.0:11434" >> /etc/profile'
launchctl setenv OLLAMA_HOST "0.0.0.0:11434"

Linux version

Configure OLLAMA_HOST in the ollama systemd service (for example via systemctl edit ollama.service, under the [Service] section):

Environment="OLLAMA_HOST=0.0.0.0"
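
After restarting the Ollama service, you can check that the API is reachable from another machine. Replace <server-ip> with the host's actual address; /api/tags simply lists the locally installed models.

curl http://<server-ip>:11434/api/tags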

6. Calling the API from a program

Go examples follow. A streaming response gives faster perceived responsiveness and a better user experience; a non-streaming response is simpler to handle.

golang example: non-streaming response

package main

import (
    "bufio"
    "bytes"
    "encoding/json"
    "fmt"
    "io/ioutil"
    "net/http"
    "os"
    "strings"
    "time"
)

const (
    obaseURL  = "http://localhost:11434/api"
    omodelID  = "qwen2:0.5b" // select the appropriate model
    oendpoint = "/chat"      //"/chat/completions"
)

// olChatCompletionRequest defines the structure of the request body
type olChatCompletionRequest struct {
    Model    string `json:"model"`
    Messages []struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    } `json:"messages"`
    Stream bool `json:"stream"`
    //Temperature float32 `json:"temperature"`
}

// olChatCompletionResponse defines the structure of the response body
type olChatCompletionResponse struct {
    //Choices []struct {
    Message struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    } `json:"message"`
    //} `json:"choices"`
}

// olsendRequestWithRetry sends the request and handles possible 429 errors
func olsendRequestWithRetry(client *http.Client, requestBody []byte) (*http.Response, error) {
    req, err := http.NewRequest("POST", obaseURL+oendpoint, bytes.NewBuffer(requestBody))
    if err != nil {
        return nil, err
    }

    req.Header.Set("Content-Type", "application/json")
    //req.Header.Set("Authorization", "Bearer "+apiKey)

    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }

    if resp.StatusCode == http.StatusTooManyRequests {
        retryAfter := resp.Header.Get("Retry-After")
        if retryAfter != "" {
            duration, _ := time.ParseDuration(retryAfter + "s") // Retry-After is given in seconds
            time.Sleep(duration)
        } else {
            time.Sleep(5 * time.Second) // Wait 5 seconds by default
        }
        return olsendRequestWithRetry(client, requestBody) // recursive retry
    }

    return resp, nil
}

func main() {
    client := &http.Client{} // Create a single HTTP client instance

    // Initialize conversation history with the system prompt
    history := []struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    }{
        {"system", "You are a Tang Dynasty poet who is particularly good at imitating Li Bai's style."},
    }

    // Create a scanner for standard input
    scanner := bufio.NewScanner(os.Stdin)

    for {
        fmt.Print("Please enter your question (or type 'exit' to quit): ")
        scanner.Scan()
        userInput := strings.TrimSpace(scanner.Text())

        // Exit condition
        if userInput == "exit" {
            fmt.Println("Thanks for using, see you soon!")
            break
        }

        // Add user input to the history
        history = append(history, struct {
            Role    string `json:"role"`
            Content string `json:"content"`
        }{
            "user",
            userInput,
        })

        // Create the request body; the history already starts with the system message
        requestBody := olChatCompletionRequest{
            Model:    omodelID,
            Messages: history,
            Stream:   false,
            //Temperature: 0.7,
        }

        // Serialize the request body to JSON
        requestBodyJSON, err := json.Marshal(requestBody)
        if err != nil {
            fmt.Println("Error marshalling request body:", err)
            continue
        }
        fmt.Println("Request JSON: " + string(requestBodyJSON))

        // Send the request and handle retries
        resp, err := olsendRequestWithRetry(client, requestBodyJSON)
        if err != nil {
            fmt.Println("Error sending request after retries:", err)
            continue
        }
        defer resp.Body.Close()

        // Check the response status code
        if resp.StatusCode != http.StatusOK {
            fmt.Printf("Received non-200 response status code: %d\n", resp.StatusCode)
            continue
        }

        // Read the response body
        responseBody, err := ioutil.ReadAll(resp.Body)
        if err != nil {
            fmt.Println("Error reading response body:", err)
            continue
        }

        // Parse the response body
        var completionResponse olChatCompletionResponse
        err = json.Unmarshal(responseBody, &completionResponse)
        if err != nil {
            fmt.Println("Error unmarshalling response body:", err)
            continue
        }
        fmt.Printf("AI Reply: %s\n", completionResponse.Message.Content)

        // Add the assistant's reply to the history
        history = append(history, struct {
            Role    string `json:"role"`
            Content string `json:"content"`
        }{
            Role:    completionResponse.Message.Role,
            Content: completionResponse.Message.Content,
        })
    }
}
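
For reference, when Stream is false the /api/chat endpoint returns a single JSON object. The exact metadata fields vary by Ollama version, but the reply text is always under message.content, roughly like this:

{
  "model": "qwen2:0.5b",
  "created_at": "...",
  "message": { "role": "assistant", "content": "..." },
  "done": true
}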

 

golang example: streaming response
package main

import (
    "bufio"
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "os"
    "strings"
    "time"
)

const (
    obaseURL  = "http://localhost:11434/api"
    omodelID  = "qwen2:0.5b" // select the appropriate model
    oendpoint = "/chat"      //"/chat/completions"
)

// olChatCompletionRequest defines the structure of the request body
type olChatCompletionRequest struct {
    Model    string `json:"model"`
    Messages []struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    } `json:"messages"`
    Stream bool `json:"stream"`
    //Temperature float32 `json:"temperature"`
}

// olChatCompletionResponse defines the structure of each streamed response chunk
type olChatCompletionResponse struct {
    //Choices []struct {
    Message struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    } `json:"message"`
    //} `json:"choices"`
}

// olsendRequestWithRetry sends the request and handles possible 429 errors
func olsendRequestWithRetry(client *http.Client, requestBody []byte) (*http.Response, error) {
    req, err := http.NewRequest("POST", obaseURL+oendpoint, bytes.NewBuffer(requestBody))
    if err != nil {
        return nil, err
    }

    req.Header.Set("Content-Type", "application/json")
    //req.Header.Set("Authorization", "Bearer "+apiKey)

    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }

    if resp.StatusCode == http.StatusTooManyRequests {
        retryAfter := resp.Header.Get("Retry-After")
        if retryAfter != "" {
            duration, _ := time.ParseDuration(retryAfter + "s") // Retry-After is given in seconds
            time.Sleep(duration)
        } else {
            time.Sleep(5 * time.Second) // Wait 5 seconds by default
        }
        return olsendRequestWithRetry(client, requestBody) // recursive retry
    }

    return resp, nil
}

func main() {
    client := &http.Client{} // Create a single HTTP client instance

    // Initialize conversation history with the system prompt
    history := []struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    }{
        {"system", "You are a Tang Dynasty poet who is particularly good at imitating Li Bai's style."},
    }

    // Create a scanner for standard input
    scanner := bufio.NewScanner(os.Stdin)

    for {
        fmt.Print("Please enter your question (or type 'exit' to quit): ")
        scanner.Scan()
        userInput := strings.TrimSpace(scanner.Text())

        // Exit condition
        if userInput == "exit" {
            fmt.Println("Thanks for using, see you soon!")
            break
        }

        // Add user input to the history
        history = append(history, struct {
            Role    string `json:"role"`
            Content string `json:"content"`
        }{
            "user",
            userInput,
        })

        // Create the request body; the history already starts with the system message
        requestBody := olChatCompletionRequest{
            Model:    omodelID,
            Messages: history,
            Stream:   true,
            //Temperature: 0.7,
        }

        // Serialize the request body to JSON
        requestBodyJSON, err := json.Marshal(requestBody)
        if err != nil {
            fmt.Println("Error marshalling request body:", err)
            continue
        }
        fmt.Println("Request JSON: " + string(requestBodyJSON))

        // Send the request and handle retries
        resp, err := olsendRequestWithRetry(client, requestBodyJSON)
        if err != nil {
            fmt.Println("Error sending request after retries:", err)
            continue
        }
        defer resp.Body.Close()

        // Check the response status code
        if resp.StatusCode != http.StatusOK {
            fmt.Printf("Received non-200 response status code: %d\n", resp.StatusCode)
            continue
        }

        // Read the streamed response chunk by chunk
        resultMessage := ""
        streamReader := resp.Body
        buf := make([]byte, 1024) // Or use a larger buffer to improve read performance
        var completionResponse olChatCompletionResponse
        fmt.Println("AI reply:")
        for {
            n, err := streamReader.Read(buf)
            if n > 0 {
                // Each chunk is a JSON object; print its content fragment as it arrives
                if uerr := json.Unmarshal(buf[:n], &completionResponse); uerr != nil {
                    fmt.Println("Error unmarshalling response chunk:", uerr)
                } else {
                    fmt.Print(completionResponse.Message.Content)
                    resultMessage += completionResponse.Message.Content
                }
            }
            if err != nil {
                if err == io.EOF {
                    fmt.Println("")
                    break
                }
                panic(err)
            }
        }

        // Add the assistant's full reply to the history
        history = append(history, struct {
            Role    string `json:"role"`
            Content string `json:"content"`
        }{
            Role:    "assistant",
            Content: resultMessage,
        })
    }
}
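
A note on the read loop above: a single Read can return a partial JSON object, or several objects at once, so unmarshalling buf[:n] directly may fail on longer replies. A more robust sketch, assuming the same olChatCompletionResponse struct, is a helper built on json.Decoder (readStream is a name introduced here, not part of the original code):

// readStream prints each streamed fragment as it arrives and returns the full reply.
func readStream(body io.Reader) (string, error) {
    result := ""
    decoder := json.NewDecoder(body)
    for {
        var chunk olChatCompletionResponse
        if err := decoder.Decode(&chunk); err == io.EOF {
            break // stream finished
        } else if err != nil {
            return result, err
        }
        fmt.Print(chunk.Message.Content)
        result += chunk.Message.Content
    }
    fmt.Println()
    return result, nil
}

json.Decoder buffers internally and reads exactly one JSON value per Decode call, so partial reads are handled automatically.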

 


Thanks for reading!