Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Develop #53

Merged
merged 3 commits into from
Jan 21, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ dependencies {
implementation 'org.springframework.boot:spring-boot-starter-validation'
implementation group: 'io.github.dailyon-maven', name: 'daily-on-common', version: '0.0.9'

implementation 'org.springframework.kafka:spring-kafka'
implementation 'org.springframework.boot:spring-boot-starter-webflux'
implementation 'io.jsonwebtoken:jjwt-api:0.11.5'
implementation 'org.springframework.cloud:spring-cloud-starter-circuitbreaker-reactor-resilience4j'
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package com.dailyon.auctionservice.infra.kafka;

import com.dailyon.auctionservice.infra.kafka.dto.BiddingDTO;
import com.dailyon.auctionservice.service.AuctionHistoryService;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

@Component
@RequiredArgsConstructor
public class AuctionEventListener {

private final ObjectMapper objectMapper;
private final AuctionHistoryService auctionHistoryService;

@KafkaListener(topics = "cancel-bidding")
public void cancel(String message, Acknowledgment ack) {
BiddingDTO biddingDTO = null;
try {
biddingDTO = objectMapper.readValue(message, BiddingDTO.class);
auctionHistoryService.delete(biddingDTO);
ack.acknowledge();
} catch (JsonProcessingException e) {
e.printStackTrace();
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
package com.dailyon.auctionservice.infra.kafka;

import com.dailyon.auctionservice.infra.kafka.dto.BiddingDTO;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Slf4j
@Component
@RequiredArgsConstructor
public class AuctionEventProducer {

private final KafkaTemplate<String, String> kafkaTemplate;
private final ObjectMapper objectMapper;

public void createAuctionHistory(BiddingDTO biddingDTO) {
log.info("createAuctionHistory -> memberId {}", biddingDTO.getMemberId());
try {
kafkaTemplate.send("success-bidding", objectMapper.writeValueAsString(biddingDTO));
} catch (JsonProcessingException e) {
e.printStackTrace();
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
package com.dailyon.auctionservice.infra.kafka.dto;

import lombok.*;

// Kafka event payload exchanged on the "success-bidding" / "cancel-bidding" topics.
// Serialized to/from JSON by ObjectMapper in AuctionEventProducer and AuctionEventListener,
// so the no-args constructor and setters are required for Jackson binding.
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class BiddingDTO {
  // Identifier of the auction the bid belongs to.
  private String auctionId;
  // Identifier of the bidding member.
  private Long memberId;
  // Points charged for the winning bid (5% of the winner bid — see BidService).
  private Long usePoints;
}
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
package com.dailyon.auctionservice.service;

import com.dailyon.auctionservice.document.AuctionHistory;
import com.dailyon.auctionservice.infra.kafka.dto.BiddingDTO;
import com.dailyon.auctionservice.repository.AuctionHistoryRepository;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;

import java.util.ArrayList;
import java.util.List;
Expand Down Expand Up @@ -46,4 +48,13 @@ public AuctionHistory getAuctionHistory(String memberId, String auctionId) {
.orElseThrow(() -> new RuntimeException("해당 경매 내역 정보가 존재하지 않습니다."));
return auctionHistory;
}

/**
 * Deletes the auction history entry matching the cancelled bidding event.
 *
 * @param biddingDTO cancelled-bid details (auction id and member id identify the row)
 * @throws RuntimeException when no matching history exists — replaces the previous bare
 *     Optional.get(), which threw an undocumented NoSuchElementException; the message style
 *     mirrors getAuctionHistory above.
 */
public void delete(BiddingDTO biddingDTO) {
  AuctionHistory auctionHistory =
      auctionHistoryRepository
          .findByAuctionIdAndMemberId(
              biddingDTO.getAuctionId(), String.valueOf(biddingDTO.getMemberId()))
          .orElseThrow(() -> new RuntimeException("해당 경매 내역 정보가 존재하지 않습니다."));
  auctionHistoryRepository.delete(auctionHistory);
}
}
29 changes: 21 additions & 8 deletions src/main/java/com/dailyon/auctionservice/service/BidService.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
import com.dailyon.auctionservice.dto.response.BidInfo;
import com.dailyon.auctionservice.dto.response.ReadAuctionDetailResponse;
import com.dailyon.auctionservice.dto.response.TopBidderResponse;
import com.dailyon.auctionservice.infra.kafka.AuctionEventProducer;
import com.dailyon.auctionservice.infra.kafka.dto.BiddingDTO;
import com.dailyon.auctionservice.infra.sqs.AuctionSqsProducer;
import com.dailyon.auctionservice.infra.sqs.dto.RawNotificationData;
import com.dailyon.auctionservice.infra.sqs.dto.SQSNotificationDto;
Expand Down Expand Up @@ -36,6 +38,7 @@ public class BidService {
private final ProductClient productClient;
private final AuctionSqsProducer auctionSqsProducer;
private final AuctionHistoryRepository auctionHistoryRepository;
private final AuctionEventProducer eventProducer;

public Mono<Long> create(CreateBidRequest request, String memberId) {
BidHistory bidHistory = request.toEntity(memberId);
Expand Down Expand Up @@ -77,10 +80,10 @@ private Mono<Void> processAuction(Auction auction, Long bid) {
Mono<ReadAuctionDetailResponse.ReadProductDetailResponse> productInfo =
productClient.readProductDetail(auction.getAuctionProductId());

return Mono.when(
saveSuccessfulBiddersHistory(productInfo, auction, bid),
saveRemainBiddersHistory(productInfo, auction, bid),
sendSqsNotification(auction));
return Mono.zip(
saveSuccessfulBiddersHistory(productInfo, auction, bid),
saveRemainBiddersHistory(productInfo, auction, bid))
.then(sendSqsNotification(auction));
}

private Mono<Void> saveSuccessfulBiddersHistory(
Expand All @@ -94,8 +97,16 @@ private Mono<Void> saveSuccessfulBiddersHistory(
tuple -> createAuctionHistories(auction, tuple.getT1(), tuple.getT2(), true));

return saveAuctionHistories(auctionHistories)
.then(auctionHistories.collectList())
// .doOnNext() kafka메세지 들어갈자리
.doOnNext(
auctionHistoryList -> {
for (AuctionHistory auctionHistory : auctionHistoryList) {
BiddingDTO biddingDTO = new BiddingDTO();
biddingDTO.setMemberId(Long.valueOf(auctionHistory.getMemberId()));
biddingDTO.setAuctionId(auctionHistory.getAuctionId());
biddingDTO.setUsePoints((long) (auctionHistory.getAuctionWinnerBid() * 0.05));
eventProducer.createAuctionHistory(biddingDTO);
}
})
.then();
}

Expand Down Expand Up @@ -147,7 +158,9 @@ private Flux<AuctionHistory> createAuctionHistories(
});
}

private Mono<Void> saveAuctionHistories(Flux<AuctionHistory> auctionHistories) {
return Mono.when(auctionHistories.map(auctionHistoryRepository::save).collectList());
private Mono<List<AuctionHistory>> saveAuctionHistories(Flux<AuctionHistory> auctionHistories) {
return auctionHistories
.collectList()
.flatMap(list -> Flux.fromIterable(list).map(auctionHistoryRepository::save).collectList());
}
}
17 changes: 17 additions & 0 deletions src/main/resources/application-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,23 @@ spring:
host: redis-cluster
port: 6379
password: 11111111
kafka:
producer:
bootstrap-servers: kafka-service:9092
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
# Leader Broker가 메시지를 받았는지 확인 신호 요청
acks: 1
consumer:
bootstrap-servers: kafka-service:9092
# 컨슈머 그룹 지정 - 컨슈머 그룹안의 하나의 컨슈머가 다운되더라도 컨슈머 그룹 안의 다른 컨슈머가 읽을 수 있도록 함 또한 Offset으로 재시작시 메시지 관리 가능
group-id: auction-service
# Kafka consumer가 다운되었을 때 가장 빠른 컨슈머 오프셋을 가지는 것 부터 읽는다.
auto-offset-reset: earliest
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
listener:
ack-mode: manual
eureka:
client:
register-with-eureka: true
Expand Down
18 changes: 18 additions & 0 deletions src/main/resources/application-prod.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,24 @@ spring:
nodes: redis-cluster-0.redis-cluster.prod.svc.cluster.local:6379,redis-cluster-1.redis-cluster.prod.svc.cluster.local:6379,redis-cluster-2.redis-cluster.prod.svc.cluster.local:6379,redis-cluster-3.redis-cluster.prod.svc.cluster.local:6379,redis-cluster-4.redis-cluster.prod.svc.cluster.local:6379,redis-cluster-5.redis-cluster.prod.svc.cluster.local:6379
max-redirects: 3
password:

kafka:
producer:
bootstrap-servers: kafka-service:9092
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
# Leader Broker가 메시지를 받았는지 확인 신호 요청
acks: 1
consumer:
bootstrap-servers: kafka-service:9092
# 컨슈머 그룹 지정 - 컨슈머 그룹안의 하나의 컨슈머가 다운되더라도 컨슈머 그룹 안의 다른 컨슈머가 읽을 수 있도록 함 또한 Offset으로 재시작시 메시지 관리 가능
group-id: auction-service
# Kafka consumer가 다운되었을 때 가장 빠른 컨슈머 오프셋을 가지는 것 부터 읽는다.
auto-offset-reset: earliest
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
listener:
ack-mode: manual
eureka:
client:
register-with-eureka: true
Expand Down
4 changes: 3 additions & 1 deletion src/test/resources/application.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@ spring:
enabled: false
discovery:
enabled: false

kafka:
consumer:
group-id: sjy
logging:
cloud:
aws:
Expand Down