Building a Knowledge Base from Scratch with Ollama, LangChain, and Spring Boot (Part 2): Simple Conversation with the Large Model

Create a Spring Boot project based on JDK 17.

Add the langchain4j dependencies (this demo only supports Ollama); a sketch of the pom coordinates follows.
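The original does not list the exact coordinates; below is a minimal sketch, assuming langchain4j 0.36.0 (the same version as the langchain4j-reactor dependency introduced later):

<dependency>
    <groupId>dev.langchain4j</groupId>
    <artifactId>langchain4j</artifactId>
    <version>0.36.0</version>
</dependency>
<dependency>
    <groupId>dev.langchain4j</groupId>
    <artifactId>langchain4j-ollama</artifactId>
    <version>0.36.0</version>
</dependency>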

Once the project is set up, we can start writing code.

First, edit application.yml:

server:
  tomcat:
    uri-encoding: utf-8
  port: 8888
spring:
  # Application name
  application:
    name: studyllm
  main:
    # Allow bean definitions with the same name to be overridden
    allow-bean-definition-overriding: true

Next, create a ChatController class:

package com.example.studyllm.ollama;

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.output.Response;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/ollama")
public class ChatController {

    @GetMapping("/chat")
    public String chat(String message) {
        ChatLanguageModel model = buildModel();
        // The system message fixes the model's persona and reply language
        // ("You are a domestic large model; please answer all questions in Chinese").
        Response<AiMessage> response = model.generate(
                new SystemMessage("你是一个国产大模型,请使用中文回复所有问题"),
                new UserMessage(message));
        return response.content().text();
    }

    private ChatLanguageModel buildModel() {
        return OllamaChatModel.builder()
                .baseUrl("http://127.0.0.1:11434") // local Ollama server
                .modelName("qwen:4b")              // model must already be pulled in Ollama
                .temperature(0.1)                  // low temperature for more deterministic answers
                .logRequests(true)
                .logResponses(true)
                .build();
    }
}

At this point you can chat with the large model through the http://localhost:8888/ollama/chat endpoint, passing the prompt as the message query parameter; an example call follows.
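For example, with a hypothetical prompt (URL-encode non-ASCII text):

curl "http://localhost:8888/ollama/chat?message=hello"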

If you need streaming output instead, adjust the code as follows.

Add the langchain4j-reactor dependency to the pom:

<dependency>
    <groupId>dev.langchain4j</groupId>
    <artifactId>langchain4j-reactor</artifactId>
    <version>0.36.0</version>
</dependency>
Then rewrite ChatController to use the streaming model:

package com.example.studyllm.ollama;

import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.service.AiServices;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
@RequestMapping("/ollama")
public class ChatController {

    @GetMapping(value = "/chat", produces = "text/event-stream")
    public Flux<String> chat() {
        // Hardcoded demo prompt ("What is the capital of China?")
        String message = "中國首都是哪裡";
        StreamingChatLanguageModel model = buildModel();
        // AiServices wires the Assistant interface (shown below) to the streaming
        // model; the chat memory keeps the last 10 messages of the conversation.
        Assistant assistant = AiServices.builder(Assistant.class)
                .streamingChatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();

        return assistant.chat(message);
    }

    private StreamingChatLanguageModel buildModel() {
        return OllamaStreamingChatModel.builder()
                .baseUrl("http://127.0.0.1:11434")
                .modelName("qwen:4b") // colon-separated tag, consistent with the non-streaming example
                .temperature(0.1)
                .build();
    }
}
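Note that the Assistant interface referenced by AiServices above is not shown in the original post. Below is a minimal sketch that makes the example compile; with langchain4j-reactor on the classpath, declaring the AI service method to return Flux<String> is what enables token-by-token streaming:

package com.example.studyllm.ollama;

import reactor.core.publisher.Flux;

public interface Assistant {
    Flux<String> chat(String message);
}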


Reprinted from blog.csdn.net/wang_keng/article/details/145261917