167 changed files with 2919 additions and 1296 deletions
@@ -0,0 +1,8 @@
dependencies {
    api(
        project(":io.sc.platform.system"),
        project(":io.sc.platform.ai.frontend"),
        //"io.github.lnyo-cly:ai4j-spring-boot-starter:${ai4j_version}",
        "com.squareup.okhttp3:okhttp:${okhttp_version}"
    )
}
@@ -0,0 +1,4 @@
package io.sc.platform.ai.anythingllm.service;

public interface AnythingllmService {
}
@@ -0,0 +1,6 @@
package io.sc.platform.ai.anythingllm.service.impl;

import io.sc.platform.ai.anythingllm.service.AnythingllmService;

public class AnythingllmServiceImpl implements AnythingllmService {
}
@@ -0,0 +1,86 @@
package io.sc.platform.ai.anythingllm.service.support.workspaces;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.sc.platform.ai.ollama.MessageWrapper;
import io.sc.platform.ai.ollama.OllamaApi;
import io.sc.platform.ai.ollama.service.support.chat.ChatRequest;
import io.sc.platform.ai.ollama.service.support.chat.Message;
import io.sc.platform.util.CollectionUtil;
import io.sc.platform.util.ObjectMapperUtil;
import okhttp3.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.util.List;

public class StreamChatApi extends OllamaApi {
    private static final Logger log = LoggerFactory.getLogger(StreamChatApi.class);

    public StreamChatApi(String baseUrl) {
        super("/api/chat", "POST");
        this.baseUrl = baseUrl;
    }

    public ResponseBodyEmitter execute(MessageWrapper chatRequest) {
        ChatRequest chatCompletion = createChatCompletion(chatRequest);
        if (chatCompletion == null) { return null; }
        Call call = createRequestCall(chatCompletion);
        if (call == null) { return null; }

        ResponseBodyEmitter emitter = new ResponseBodyEmitter();
        call.enqueue(new StreamChatCallback(emitter));
        return emitter;
    }

    private ChatRequest createChatCompletion(MessageWrapper chatRequest) {
        if (chatRequest == null) { return null; }

        String model = chatRequest.getModel();
        if (!StringUtils.hasText(model)) { return null; }

        List<String> questions = chatRequest.getQuestions();
        if (!CollectionUtil.hasElements(questions)) { return null; }

        ChatRequest chatCompletion = new ChatRequest();
        chatCompletion.setModel(model);

        // "使用中文回答" = "answer in Chinese": a fixed instruction prepended before the user questions
        chatCompletion.addMessage(new Message("user", "使用中文回答"));
        for (String question : questions) {
            chatCompletion.addMessage(new Message("user", question));
        }
        return chatCompletion;
    }

    private Call createRequestCall(ChatRequest chatCompletion) {
        if (chatCompletion == null) { return null; }

        OkHttpClient client = new OkHttpClient.Builder()
            .connectTimeout(this.connectTimeout)
            .readTimeout(this.readTimeout)
            .writeTimeout(this.writeTimeout)
            .build();

        Headers headers = new Headers.Builder()
            .set("Content-Type", "application/json")
            .set("Accept", "text/event-stream")
            .build();
        String json = "";
        try {
            json = ObjectMapperUtil.json().writeValueAsString(chatCompletion);
        } catch (JsonProcessingException e) {
            log.error("failed to serialize chat request", e);
            return null;
        }
        RequestBody body = RequestBody.create(json, MediaType.parse("application/json; charset=utf-8"));
        okhttp3.Request request = new okhttp3.Request.Builder()
            .url(this.baseUrl + this.url)
            .headers(headers)
            .post(body)
            .build();

        Call call = client.newCall(request);
        return call;
    }
}
@@ -0,0 +1,45 @@
package io.sc.platform.ai.anythingllm.service.support.workspaces;

import io.sc.platform.ai.ollama.service.support.chat.ChatResponse;
import io.sc.platform.util.ObjectMapperUtil;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
import okhttp3.ResponseBody;
import okio.BufferedSource;
import org.jetbrains.annotations.NotNull;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.io.IOException;

public class StreamChatCallback implements Callback {
    private ResponseBodyEmitter emitter;

    public StreamChatCallback(ResponseBodyEmitter emitter) {
        this.emitter = emitter;
    }

    @Override
    public void onFailure(@NotNull Call call, @NotNull IOException e) {
        emitter.completeWithError(e);
    }

    @Override
    public void onResponse(@NotNull Call call, @NotNull Response response) throws IOException {
        if (response.isSuccessful()) {
            try (ResponseBody body = response.body()) {
                BufferedSource bufferedSource = body.source();
                while (!bufferedSource.exhausted()) {
                    String line = bufferedSource.readUtf8Line();
                    ChatResponse responseObject = ObjectMapperUtil.json().readValue(line, ChatResponse.class);
                    emitter.send(responseObject.getMessage().getContent());
                }
                emitter.complete();
            } catch (Exception e) {
                emitter.completeWithError(e);
            }
        } else {
            emitter.completeWithError(new RuntimeException(response.message()));
        }
    }
}
@@ -0,0 +1,25 @@
package io.sc.platform.ai.ollama;

import java.util.ArrayList;
import java.util.List;

public class MessageWrapper {
    private String model;
    private List<String> questions = new ArrayList<>();

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public List<String> getQuestions() {
        return questions;
    }

    public void setQuestions(List<String> questions) {
        this.questions = questions;
    }
}
@@ -0,0 +1,66 @@
package io.sc.platform.ai.ollama;

import java.time.Duration;

public class OllamaApi {
    protected String baseUrl = "http://localhost:11434";
    protected String url;
    protected String method;
    protected Duration connectTimeout = Duration.ofMinutes(2);
    protected Duration readTimeout = Duration.ofMinutes(2);
    protected Duration writeTimeout = Duration.ofMinutes(2);

    public OllamaApi() {}

    public OllamaApi(String url, String method) {
        this.url = url;
        this.method = method;
    }

    public String getBaseUrl() {
        return baseUrl;
    }

    public void setBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getMethod() {
        return method;
    }

    public void setMethod(String method) {
        this.method = method;
    }

    public Duration getConnectTimeout() {
        return connectTimeout;
    }

    public void setConnectTimeout(Duration connectTimeout) {
        this.connectTimeout = connectTimeout;
    }

    public Duration getReadTimeout() {
        return readTimeout;
    }

    public void setReadTimeout(Duration readTimeout) {
        this.readTimeout = readTimeout;
    }

    public Duration getWriteTimeout() {
        return writeTimeout;
    }

    public void setWriteTimeout(Duration writeTimeout) {
        this.writeTimeout = writeTimeout;
    }
}
@@ -0,0 +1,31 @@
package io.sc.platform.ai.ollama.controller;

import io.sc.platform.ai.ollama.MessageWrapper;
import io.sc.platform.ai.ollama.service.OllamaService;
import io.sc.platform.ai.ollama.service.support.tags.TagsResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.util.List;

@RestController("io.sc.platform.ai.ollama.controller.OllamaWebController")
@RequestMapping("/api/ai/ollama")
public class OllamaWebController {
    @Autowired private OllamaService ollamaService;

    @PostMapping("/chat")
    public ResponseBodyEmitter chat(@RequestBody MessageWrapper wrapper) {
        return ollamaService.chat(wrapper);
    }

    @GetMapping("/tags")
    public TagsResponse tags() {
        return ollamaService.tags();
    }

    @GetMapping("/modelNames")
    public List<String> modelNames() {
        return ollamaService.modelNames();
    }
}
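
For orientation, a minimal client-side sketch of calling the /chat endpoint above. The host/port (localhost:8080), the absence of a servlet context path, and the chosen model name are assumptions; the request body simply mirrors the MessageWrapper fields (model, questions), and the model field may be omitted to fall back to the configured default.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class OllamaChatClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical deployment URL; adjust to the actual host, port, and context path.
        String endpoint = "http://localhost:8080/api/ai/ollama/chat";
        // JSON body matching MessageWrapper.
        String body = "{\"model\":\"deepseek-r1:14b\",\"questions\":[\"What is Ollama?\"]}";
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create(endpoint))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        // The controller streams content through a ResponseBodyEmitter,
        // so read the response body incrementally, line by line.
        HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofLines())
            .body()
            .forEach(System.out::print);
    }
}
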
@@ -0,0 +1,13 @@
package io.sc.platform.ai.ollama.service;

import io.sc.platform.ai.ollama.MessageWrapper;
import io.sc.platform.ai.ollama.service.support.tags.TagsResponse;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.util.List;

public interface OllamaService {
    public ResponseBodyEmitter chat(MessageWrapper wrapper);
    public TagsResponse tags();
    public List<String> modelNames();
}
@@ -0,0 +1,69 @@
package io.sc.platform.ai.ollama.service.impl;

import io.sc.platform.ai.ollama.MessageWrapper;
import io.sc.platform.ai.ollama.service.OllamaService;
import io.sc.platform.ai.ollama.service.support.chat.ChatApi;
import io.sc.platform.ai.ollama.service.support.tags.Model;
import io.sc.platform.ai.ollama.service.support.tags.TagsApi;
import io.sc.platform.ai.ollama.service.support.tags.TagsResponse;
import io.sc.platform.mvc.service.SystemParameterService;
import io.sc.platform.util.CollectionUtil;
import io.sc.platform.util.StringUtil;
import io.sc.platform.util.support.NumberStringComparator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

@Service("io.sc.platform.ai.service.impl.OllamaServiceImpl")
public class OllamaServiceImpl implements OllamaService {
    private static final String KEY_API_URL = "parameter.ai.provider.ollama.apiUrl";
    private static final String KEY_DEFAULT_MODEL_NAME = "parameter.ai.provider.ollama.defaultModelName";

    @Autowired private SystemParameterService systemParameterService;

    @Override
    public ResponseBodyEmitter chat(MessageWrapper wrapper) {
        Map<String, String> parameters = getParameters();
        if (!StringUtil.hasText(wrapper.getModel())) {
            wrapper.setModel(parameters.get(KEY_DEFAULT_MODEL_NAME));
        }
        ChatApi api = new ChatApi(parameters.get(KEY_API_URL));
        return api.execute(wrapper);
    }

    @Override
    public TagsResponse tags() {
        Map<String, String> parameters = getParameters();
        TagsApi api = new TagsApi(parameters.get(KEY_API_URL));
        return api.execute();
    }

    @Override
    public List<String> modelNames() {
        Map<String, String> parameters = getParameters();
        TagsApi api = new TagsApi(parameters.get(KEY_API_URL));
        TagsResponse response = api.execute();
        if (response == null) {
            return Collections.emptyList();
        }
        List<Model> models = response.getModels();
        if (!CollectionUtil.hasElements(models)) {
            return Collections.emptyList();
        }
        List<String> result = new ArrayList<>();
        for (Model model : models) {
            result.add(model.getModel());
        }
        Collections.sort(result, new NumberStringComparator());
        return result;
    }

    private Map<String, String> getParameters() {
        return systemParameterService.getParameters(new String[]{KEY_API_URL, KEY_DEFAULT_MODEL_NAME});
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.blobs;

import io.sc.platform.ai.ollama.OllamaApi;

public class BlobsApi extends OllamaApi {
    public BlobsApi() {
        super("/api/blobs/", "HEAD");
    }
}
@@ -0,0 +1,84 @@
package io.sc.platform.ai.ollama.service.support.chat;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.sc.platform.ai.ollama.MessageWrapper;
import io.sc.platform.ai.ollama.OllamaApi;
import io.sc.platform.util.CollectionUtil;
import io.sc.platform.util.ObjectMapperUtil;
import okhttp3.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.util.List;

public class ChatApi extends OllamaApi {
    private static final Logger log = LoggerFactory.getLogger(ChatApi.class);

    public ChatApi(String baseUrl) {
        super("/api/chat", "POST");
        this.baseUrl = baseUrl;
    }

    public ResponseBodyEmitter execute(MessageWrapper wrapper) {
        ChatRequest chatRequest = createChatRequest(wrapper);
        if (chatRequest == null) { return null; }
        Call call = createRequestCall(chatRequest);
        if (call == null) { return null; }

        ResponseBodyEmitter emitter = new ResponseBodyEmitter();
        call.enqueue(new ChatCallback(emitter));
        return emitter;
    }

    private ChatRequest createChatRequest(MessageWrapper wrapper) {
        if (wrapper == null) { return null; }

        String model = wrapper.getModel();
        if (!StringUtils.hasText(model)) { return null; }

        List<String> questions = wrapper.getQuestions();
        if (!CollectionUtil.hasElements(questions)) { return null; }

        ChatRequest request = new ChatRequest();
        request.setModel(model);

        // "使用中文回答" = "answer in Chinese": a fixed instruction prepended before the user questions
        request.addMessage(new Message("user", "使用中文回答"));
        for (String question : questions) {
            request.addMessage(new Message("user", question));
        }
        return request;
    }

    private Call createRequestCall(ChatRequest chatRequest) {
        if (chatRequest == null) { return null; }

        OkHttpClient client = new OkHttpClient.Builder()
            .connectTimeout(this.connectTimeout)
            .readTimeout(this.readTimeout)
            .writeTimeout(this.writeTimeout)
            .build();

        Headers headers = new Headers.Builder()
            .set("Content-Type", "application/json")
            .set("Accept", "text/event-stream")
            .build();
        String json = "";
        try {
            json = ObjectMapperUtil.json().writeValueAsString(chatRequest);
        } catch (JsonProcessingException e) {
            log.error("failed to serialize chat request", e);
            return null;
        }
        RequestBody body = RequestBody.create(json, MediaType.parse("application/json; charset=utf-8"));
        okhttp3.Request request = new okhttp3.Request.Builder()
            .url(this.baseUrl + this.url)
            .headers(headers)
            .post(body)
            .build();

        Call call = client.newCall(request);
        return call;
    }
}
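
As a quick orientation, a minimal sketch of using ChatApi directly, bypassing the controller and OllamaServiceImpl. The Ollama base URL and model name are assumptions taken from the default parameter values elsewhere in this change.

import io.sc.platform.ai.ollama.MessageWrapper;
import io.sc.platform.ai.ollama.service.support.chat.ChatApi;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.util.Arrays;

public class ChatApiUsageSketch {
    public static void main(String[] args) {
        MessageWrapper wrapper = new MessageWrapper();
        wrapper.setModel("deepseek-r1:14b"); // assumed model; the service layer would fill in the configured default
        wrapper.setQuestions(Arrays.asList("Summarize what the /api/chat endpoint does."));

        // execute() enqueues the OkHttp call and returns immediately; chunks are pushed
        // into the emitter by ChatCallback as the model streams them back.
        ResponseBodyEmitter emitter = new ChatApi("http://localhost:11434").execute(wrapper);
        // In a web context the emitter is returned from a controller method,
        // which lets Spring MVC flush each sent chunk to the HTTP response.
    }
}
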
@@ -0,0 +1,44 @@
package io.sc.platform.ai.ollama.service.support.chat;

import io.sc.platform.util.ObjectMapperUtil;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
import okhttp3.ResponseBody;
import okio.BufferedSource;
import org.jetbrains.annotations.NotNull;
import org.springframework.web.servlet.mvc.method.annotation.ResponseBodyEmitter;

import java.io.IOException;

public class ChatCallback implements Callback {
    private ResponseBodyEmitter emitter;

    public ChatCallback(ResponseBodyEmitter emitter) {
        this.emitter = emitter;
    }

    @Override
    public void onFailure(@NotNull Call call, @NotNull IOException e) {
        emitter.completeWithError(e);
    }

    @Override
    public void onResponse(@NotNull Call call, @NotNull Response response) throws IOException {
        if (response.isSuccessful()) {
            try (ResponseBody body = response.body()) {
                BufferedSource bufferedSource = body.source();
                while (!bufferedSource.exhausted()) {
                    String line = bufferedSource.readUtf8Line();
                    ChatResponse chatResponse = ObjectMapperUtil.json().readValue(line, ChatResponse.class);
                    emitter.send(chatResponse.getMessage().getContent());
                }
                emitter.complete();
            } catch (Exception e) {
                emitter.completeWithError(e);
            }
        } else {
            emitter.completeWithError(new RuntimeException(response.message()));
        }
    }
}
@@ -0,0 +1,80 @@
package io.sc.platform.ai.ollama.service.support.chat;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@JsonIgnoreProperties(ignoreUnknown=true)
public class ChatRequest {
    private String model;
    private List<Message> messages = new ArrayList<>();
    private List<Tool> tools = new ArrayList<>();

    private String format;
    private Map<String, Object> options = new HashMap<>();
    private boolean stream = true;
    private long keepAlive = 1000 * 60 * 5;

    public void addMessage(Message message) {
        messages.add(message);
    }

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public List<Message> getMessages() {
        return messages;
    }

    public void setMessages(List<Message> messages) {
        this.messages = messages;
    }

    public List<Tool> getTools() {
        return tools;
    }

    public void setTools(List<Tool> tools) {
        this.tools = tools;
    }

    public String getFormat() {
        return format;
    }

    public void setFormat(String format) {
        this.format = format;
    }

    public Map<String, Object> getOptions() {
        return options;
    }

    public void setOptions(Map<String, Object> options) {
        this.options = options;
    }

    public boolean getStream() {
        return stream;
    }

    public void setStream(boolean stream) {
        this.stream = stream;
    }

    public long getKeepAlive() {
        return keepAlive;
    }

    public void setKeepAlive(long keepAlive) {
        this.keepAlive = keepAlive;
    }
}
@@ -0,0 +1,117 @@
package io.sc.platform.ai.ollama.service.support.chat;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@JsonIgnoreProperties(ignoreUnknown=true)
public class ChatResponse {
    @JsonProperty("model")
    private String model;

    @JsonProperty("created_at")
    private String createdAt;

    @JsonProperty("message")
    private Message message;

    @JsonProperty("done")
    private boolean done;

    @JsonProperty("total_duration")
    private long totalDuration;

    @JsonProperty("load_duration")
    private long loadDuration;

    @JsonProperty("prompt_eval_count")
    private long promptEvalCount;

    @JsonProperty("prompt_eval_duration")
    private long promptEvalDuration;

    @JsonProperty("eval_count")
    private long evalCount;

    @JsonProperty("eval_duration")
    private long evalDuration;

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public String getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(String createdAt) {
        this.createdAt = createdAt;
    }

    public Message getMessage() {
        return message;
    }

    public void setMessage(Message message) {
        this.message = message;
    }

    public boolean isDone() {
        return done;
    }

    public void setDone(boolean done) {
        this.done = done;
    }

    public long getTotalDuration() {
        return totalDuration;
    }

    public void setTotalDuration(long totalDuration) {
        this.totalDuration = totalDuration;
    }

    public long getLoadDuration() {
        return loadDuration;
    }

    public void setLoadDuration(long loadDuration) {
        this.loadDuration = loadDuration;
    }

    public long getPromptEvalCount() {
        return promptEvalCount;
    }

    public void setPromptEvalCount(long promptEvalCount) {
        this.promptEvalCount = promptEvalCount;
    }

    public long getPromptEvalDuration() {
        return promptEvalDuration;
    }

    public void setPromptEvalDuration(long promptEvalDuration) {
        this.promptEvalDuration = promptEvalDuration;
    }

    public long getEvalCount() {
        return evalCount;
    }

    public void setEvalCount(long evalCount) {
        this.evalCount = evalCount;
    }

    public long getEvalDuration() {
        return evalDuration;
    }

    public void setEvalDuration(long evalDuration) {
        this.evalDuration = evalDuration;
    }
}
@@ -0,0 +1,54 @@
package io.sc.platform.ai.ollama.service.support.chat;

import java.util.ArrayList;
import java.util.List;

public class Message {
    public static final String SYSTEM = "system";
    public static final String USER = "user";
    public static final String ASSISTANT = "assistant";
    public static final String TOOL = "tool";

    private String role;
    private String content;
    private List<String> images = new ArrayList<>();
    private List<Tool> toolCalls = new ArrayList<>();

    public Message() {}

    public Message(String role, String content) {
        this.role = role;
        this.content = content;
    }

    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public List<String> getImages() {
        return images;
    }

    public void setImages(List<String> images) {
        this.images = images;
    }

    public List<Tool> getToolCalls() {
        return toolCalls;
    }

    public void setToolCalls(List<Tool> toolCalls) {
        this.toolCalls = toolCalls;
    }
}
@@ -0,0 +1,4 @@
package io.sc.platform.ai.ollama.service.support.chat;

public class Tool {
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.copy;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaCopyApi extends OllamaApi {
    public OllamaCopyApi() {
        super("/api/copy", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.create;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaCreateApi extends OllamaApi {
    public OllamaCreateApi() {
        super("/api/create", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.delete;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaDeleteApi extends OllamaApi {
    public OllamaDeleteApi() {
        super("/api/delete", "DELETE");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.embed;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaEmbedApi extends OllamaApi {
    public OllamaEmbedApi() {
        super("/api/embed", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.embeddings;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaEmbeddingsApi extends OllamaApi {
    public OllamaEmbeddingsApi() {
        super("/api/embeddings", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.generate;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaGenerateApi extends OllamaApi {
    public OllamaGenerateApi() {
        super("/api/generate", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.ps;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaPsApi extends OllamaApi {
    public OllamaPsApi() {
        super("/api/ps", "GET");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.pull;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaPullApi extends OllamaApi {
    public OllamaPullApi() {
        super("/api/pull", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.push;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaPushApi extends OllamaApi {
    public OllamaPushApi() {
        super("/api/push", "POST");
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.show;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaShowApi extends OllamaApi {
    public OllamaShowApi() {
        super("/api/show", "POST");
    }
}
@@ -0,0 +1,71 @@
package io.sc.platform.ai.ollama.service.support.tags;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.util.List;

@JsonIgnoreProperties(ignoreUnknown=true)
public class Details {
    @JsonProperty("parent_model")
    private String parentModel;

    private String format;
    private String family;
    private List<String> families;

    @JsonProperty("parameter_size")
    private String parameterSize;

    @JsonProperty("quantization_level")
    private String quantizationLevel;

    public String getParentModel() {
        return parentModel;
    }

    public void setParentModel(String parentModel) {
        this.parentModel = parentModel;
    }

    public String getFormat() {
        return format;
    }

    public void setFormat(String format) {
        this.format = format;
    }

    public String getFamily() {
        return family;
    }

    public void setFamily(String family) {
        this.family = family;
    }

    public List<String> getFamilies() {
        return families;
    }

    public void setFamilies(List<String> families) {
        this.families = families;
    }

    public String getParameterSize() {
        return parameterSize;
    }

    public void setParameterSize(String parameterSize) {
        this.parameterSize = parameterSize;
    }

    public String getQuantizationLevel() {
        return quantizationLevel;
    }

    public void setQuantizationLevel(String quantizationLevel) {
        this.quantizationLevel = quantizationLevel;
    }
}
@@ -0,0 +1,65 @@
package io.sc.platform.ai.ollama.service.support.tags;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@JsonIgnoreProperties(ignoreUnknown=true)
public class Model {
    private String name;
    private String model;

    @JsonProperty("modified_at")
    private String modifiedAt;

    private long size;
    private String digest;
    private Details details;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public String getModifiedAt() {
        return modifiedAt;
    }

    public void setModifiedAt(String modifiedAt) {
        this.modifiedAt = modifiedAt;
    }

    public long getSize() {
        return size;
    }

    public void setSize(long size) {
        this.size = size;
    }

    public String getDigest() {
        return digest;
    }

    public void setDigest(String digest) {
        this.digest = digest;
    }

    public Details getDetails() {
        return details;
    }

    public void setDetails(Details details) {
        this.details = details;
    }
}
@@ -0,0 +1,42 @@
package io.sc.platform.ai.ollama.service.support.tags;

import io.sc.platform.ai.ollama.OllamaApi;
import io.sc.platform.util.ObjectMapperUtil;
import okhttp3.*;

import java.io.IOException;

public class TagsApi extends OllamaApi {
    public TagsApi(String baseUrl) {
        super("/api/tags", "GET");
        this.baseUrl = baseUrl;
    }

    public TagsResponse execute() {
        Call call = createRequestCall();
        if (call == null) { return null; }
        try {
            Response response = call.execute();
            ResponseBody body = response.body();
            TagsResponse tagsResponse = ObjectMapperUtil.json().readValue(body.source().readUtf8(), TagsResponse.class);
            return tagsResponse;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    private Call createRequestCall() {
        OkHttpClient client = new OkHttpClient.Builder()
            .connectTimeout(this.connectTimeout)
            .readTimeout(this.readTimeout)
            .writeTimeout(this.writeTimeout)
            .build();
        Request request = new Request.Builder()
            .url(this.baseUrl + this.url)
            .get()
            .build();

        Call call = client.newCall(request);
        return call;
    }
}
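
A minimal sketch of listing the locally installed models through TagsApi; the base URL is an assumption matching the default value of parameter.ai.provider.ollama.apiUrl introduced in this change.

import io.sc.platform.ai.ollama.service.support.tags.Model;
import io.sc.platform.ai.ollama.service.support.tags.TagsApi;
import io.sc.platform.ai.ollama.service.support.tags.TagsResponse;

public class TagsApiUsageSketch {
    public static void main(String[] args) {
        // Assumed local Ollama endpoint; in the platform this value comes from the system parameter service.
        TagsResponse response = new TagsApi("http://localhost:11434").execute();
        if (response == null) {
            return;
        }
        for (Model model : response.getModels()) {
            // Prints one line per installed model, e.g. "deepseek-r1:14b (size=9000000000)".
            System.out.println(model.getModel() + " (size=" + model.getSize() + ")");
        }
    }
}
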
@@ -0,0 +1,19 @@
package io.sc.platform.ai.ollama.service.support.tags;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import java.util.ArrayList;
import java.util.List;

@JsonIgnoreProperties(ignoreUnknown=true)
public class TagsResponse {
    private List<Model> models = new ArrayList<>();

    public List<Model> getModels() {
        return models;
    }

    public void setModels(List<Model> models) {
        this.models = models;
    }
}
@@ -0,0 +1,11 @@
package io.sc.platform.ai.ollama.service.support.version;

import io.sc.platform.ai.ollama.OllamaApi;

public class OllamaVersionApi extends OllamaApi {
    public OllamaVersionApi() {
        super("/api/version", "GET");
    }
}
@@ -0,0 +1,16 @@
/*
 * Automatic component-scanning plugin configuration
 * Purpose: supplies the framework with package names to scan automatically; the configured packages are picked up by Spring's component scan.
 * Usage:
 *     includes: list of package names to include in automatic scanning
 *     excludes: list of package names to exclude from automatic scanning
 * Note: if a package appears in both includes and excludes, excludes wins, i.e. the package is not scanned.
 */

{
    "includes":[
        "io.sc.platform.ai.ollama.controller",
        "io.sc.platform.ai.ollama.service.impl"
    ],
    "excludes":[]
}
@@ -0,0 +1,5 @@
{
    "includes":[
        "io/sc/platform/ai/i18n/parameters"
    ]
}
@@ -0,0 +1,67 @@
/*
 * System parameter contribution configuration
 * Purpose: contributes configurable parameters to the system.
 * Usage:
 *     id: unique identifier of the parameter
 *     parentId: identifier of the parent entry, used to group parameters
 *     code: parameter code through which the application reads the value
 *     defaultValue: default value
 *     order: sort order
 */

[
    // AI server
    {"id":"parameter.ai","order":2000},
    // AI server / enable AI chat
    {
        "id"           :"parameter.ai.chat.enable",
        "parentId"     :"parameter.ai",
        "code"         :"parameter.ai.chat.enable",
        "defaultValue" :"true",
        "order"        : 100,
        "options"      : {
            "true"  : "parameter.ai.chat.enable.options.enable",
            "false" : "parameter.ai.chat.enable.options.disable"
        }
    },
    // AI server / default service provider
    {
        "id"           :"parameter.ai.provider.default",
        "parentId"     :"parameter.ai",
        "code"         :"parameter.ai.provider.default",
        "defaultValue" :"ollama",
        "order"        : 200,
        "options"      : {
            "ollama" : "parameter.ai.provider.default.options.ollama"
        }
    },
    // AI server / service providers
    {"id":"parameter.ai.provider", "parentId":"parameter.ai", "order":300},
    // AI server / service providers / Ollama
    {"id":"parameter.ai.provider.ollama", "parentId":"parameter.ai.provider", "order":100},
    // AI server / service providers / Ollama / API URL
    {
        "id"           :"parameter.ai.provider.ollama.apiUrl",
        "parentId"     :"parameter.ai.provider.ollama",
        "code"         :"parameter.ai.provider.ollama.apiUrl",
        "defaultValue" :"http://localhost:11434",
        "order"        : 100
    },
    // AI server / service providers / Ollama / default model name
    {
        "id"           :"parameter.ai.provider.ollama.defaultModelName",
        "parentId"     :"parameter.ai.provider.ollama",
        "code"         :"parameter.ai.provider.ollama.defaultModelName",
        "defaultValue" :"deepseek-r1:14b",
        "order"        : 200,
        "options"      : {
            "deepseek-r1:1.5b" : "deepseek-r1:1.5b",
            "deepseek-r1:7b"   : "deepseek-r1:7b",
            "deepseek-r1:8b"   : "deepseek-r1:8b",
            "deepseek-r1:14b"  : "deepseek-r1:14b",
            "deepseek-r1:32b"  : "deepseek-r1:32b",
            "deepseek-r1:70b"  : "deepseek-r1:70b",
            "deepseek-r1:671b" : "deepseek-r1:671b"
        }
    }
]
@@ -0,0 +1,4 @@
{
    "permitPatterns":[
    ]
}
@@ -0,0 +1,12 @@
parameter.ai=AI
parameter.ai.chat.enable=AI Chat Enable
parameter.ai.chat.enable.options.enable=Enable
parameter.ai.chat.enable.options.disable=Disable

parameter.ai.provider.default=Model Provider (default)
parameter.ai.provider.default.options.ollama=Ollama

parameter.ai.provider=Model Providers
parameter.ai.provider.ollama=Ollama
parameter.ai.provider.ollama.apiUrl=Api URL
parameter.ai.provider.ollama.defaultModelName=Default Model Name
@@ -0,0 +1,12 @@
parameter.ai=\u4EBA\u5DE5\u667A\u80FD
parameter.ai.chat.enable=\u662F\u5426\u958B\u555F\u667A\u80FD\u5C0D\u8A71
parameter.ai.chat.enable.options.enable=\u958B\u555F
parameter.ai.chat.enable.options.disable=\u95DC\u9589

parameter.ai.provider.default=\u9ED8\u8A8D\u6A21\u578B\u63D0\u4F9B\u5546
parameter.ai.provider.default.options.ollama=Ollama

parameter.ai.provider=\u6A21\u578B\u63D0\u4F9B\u5546
parameter.ai.provider.ollama=Ollama
parameter.ai.provider.ollama.apiUrl=Api URL
parameter.ai.provider.ollama.defaultModelName=\u9ED8\u8A8D\u6A21\u578B\u540D\u7A31
@@ -0,0 +1,13 @@
parameter.ai=\u4EBA\u5DE5\u667A\u80FD

parameter.ai.chat.enable=\u662F\u5426\u5F00\u542F\u667A\u80FD\u5BF9\u8BDD
parameter.ai.chat.enable.options.enable=\u5F00\u542F
parameter.ai.chat.enable.options.disable=\u5173\u95ED

parameter.ai.provider.default=\u9ED8\u8BA4\u6A21\u578B\u63D0\u4F9B\u5546
parameter.ai.provider.default.options.ollama=Ollama

parameter.ai.provider=Ollama
parameter.ai.provider=\u6A21\u578B\u63D0\u4F9B\u5546
parameter.ai.provider.ollama=Ollama
parameter.ai.provider.ollama.apiUrl=Api URL
parameter.ai.provider.ollama.defaultModelName=\u9ED8\u8BA4\u6A21\u578B\u540D\u79F0
@@ -1,6 +1,5 @@
dependencies {
    api(
        project(":io.sc.platform.app"),
        project(":io.sc.platform.springcloud.nacos"),
        project(":io.sc.platform.springcloud.sentinel"),
    )