Merge pull request #107 from xtreme1-io/dev
Release v0.6.0
jaggerwang committed Apr 10, 2023
2 parents 8e81625 + 834cdd2 commit 0964a22
Showing 336 changed files with 22,568 additions and 2,042 deletions.
2 changes: 1 addition & 1 deletion .ops/.gitlab-ci.yml
@@ -5,7 +5,7 @@ stages:

variables:
APP_NAME: "xtreme1"
-  APP_VERSION: "0.5.6"
+  APP_VERSION: "0.6.0"

FRONTEND_PACKAGE_DIR: "dist"
BACKEND_PACKAGE_NAME: "${APP_NAME}-backend-${APP_VERSION}-SNAPSHOT.jar"
6 changes: 4 additions & 2 deletions .ops/backend.dockerfile
@@ -1,11 +1,13 @@
FROM openjdk:11

RUN apt update && \
-    apt install -y iputils-ping curl wget netcat
+    apt install -y iputils-ping curl wget netcat python3 python3-pip
+RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@97f0e90
WORKDIR /app
COPY target/$BACKEND_PACKAGE_NAME ./app.jar
RUN mkdir -p config
RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-lidar-fusion-trial.zip' -O xtreme1-lidar-fusion-trial.zip
+RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-image-trial.zip' -O xtreme1-image-trial.zip

EXPOSE 8080

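Both Dockerfiles now install the Xtreme1 Python SDK at a pinned commit alongside the Java runtime. To reproduce that environment outside Docker, the same pinned install from the diff can be run directly; a minimal sketch, assuming Python 3, pip, and git are already on the PATH:

```bash
# Install the Xtreme1 SDK at the exact commit the images pin (97f0e90).
# --force-reinstall ensures the pinned revision replaces any cached build.
pip3 install --upgrade --force-reinstall \
    'git+https://github.com/xtreme1-io/xtreme1-sdk.git@97f0e90'
```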
6 changes: 3 additions & 3 deletions README.md
@@ -1,7 +1,7 @@
<div align="center">
<img width="386" alt="Xtreme1 logo" src="https://user-images.githubusercontent.com/84139543/190300943-98da7d5c-bd67-4074-a94f-b7405d29fb90.png">

-![](https://img.shields.io/badge/Release-v0.5.6-green)
+![](https://img.shields.io/badge/Release-v0.6.0-green)
![](https://img.shields.io/badge/License-Apache%202.0-blueviolet)
[![Slack](https://img.shields.io/badge/Join-Slack-orange.svg?logo=slack)](https://join.slack.com/t/xtreme1group/shared_invite/zt-1jhk36uzr-NpdpYXeQAEHN6rYJy5_6pg)
[![Twitter](https://img.shields.io/badge/Follow-Twitter-blue)](https://twitter.com/Xtreme1io)
@@ -58,8 +58,8 @@ Image Data Curation (Visualizing & Debug) - [MobileNetV3](https://github.com/xi
Download the latest release package and unzip it.

```bash
-wget https://github.com/xtreme1-io/xtreme1/releases/download/v0.5.6/xtreme1-v0.5.6.zip
-unzip -d xtreme1-v0.5.6 xtreme1-v0.5.6.zip
+wget https://github.com/xtreme1-io/xtreme1/releases/download/v0.6.0/xtreme1-v0.6.0.zip
+unzip -d xtreme1-v0.6.0 xtreme1-v0.6.0.zip
```

## Start all services
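A sketch of the launch step, assuming the v0.6.0 release archive ships the same compose file layout as earlier releases:

```bash
cd xtreme1-v0.6.0
# Start all services (database, cache, object storage, backend, frontend)
# in the background; add --build if images were modified locally.
docker compose up -d
```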
6 changes: 4 additions & 2 deletions backend/Dockerfile
@@ -6,10 +6,12 @@ RUN --mount=type=cache,target=/root/.m2 mvn package

FROM openjdk:11-jre
RUN apt update && \
-    apt install -y iputils-ping curl wget netcat
+    apt install -y iputils-ping curl wget netcat python3 python3-pip git
+RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@97f0e90
WORKDIR /app
-COPY --from=build /build/target/xtreme1-backend-0.5.6-SNAPSHOT.jar ./app.jar
+COPY --from=build /build/target/xtreme1-backend-0.6.0-SNAPSHOT.jar ./app.jar
RUN mkdir -p config
RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-lidar-fusion-trial.zip' -O xtreme1-lidar-fusion-trial.zip
+RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-image-trial.zip' -O xtreme1-image-trial.zip
EXPOSE 8080
CMD java -jar app.jar
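Because the first stage runs `mvn package` inside the image, the backend can presumably be built standalone from the repository root. A sketch, with the tag chosen purely for illustration (the cache mount on the build stage requires BuildKit):

```bash
# Stage one compiles the 0.6.0 snapshot jar with Maven (cached via BuildKit);
# stage two copies it into an openjdk:11-jre runtime image.
DOCKER_BUILDKIT=1 docker build -t xtreme1-backend:0.6.0 backend/
```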
2 changes: 1 addition & 1 deletion backend/README.md
@@ -92,7 +92,7 @@ cd backend
mvn package

# Using local configuration to start application.
-java -Dspring.profiles.active=local -jar target/xtreme1-backend-0.5.6-SNAPSHOT.jar
+java -Dspring.profiles.active=local -jar target/xtreme1-backend-0.6.0-SNAPSHOT.jar
```

Now you can access the backend service at `http://localhost:8080/`.
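A quick smoke test once the application is up, assuming the default port and that the root path responds without authentication:

```bash
# Expect an HTTP response from the Spring Boot backend on port 8080.
curl -i http://localhost:8080/
```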
2 changes: 1 addition & 1 deletion backend/pom.xml
@@ -13,7 +13,7 @@

<groupId>ai.basic</groupId>
<artifactId>xtreme1-backend</artifactId>
-    <version>0.5.6-SNAPSHOT</version>
+    <version>0.6.0-SNAPSHOT</version>
<name>Xtreme1 Backend</name>
<description></description>

@@ -186,4 +186,11 @@ public DatasetSimilarityJobUseCase datasetSimilarityJobUseCase() {
public ModelRecognitionUseCase modelRecognitionUseCase() {
return new ModelRecognitionUseCase();
}

+    @Bean
+    public ModelRunRecordUseCase modelRunRecordUseCase() {
+        return new ModelRunRecordUseCase();
+    }


}
backend/src/main/java/ai/basic/x1/adapter/api/config/DatasetInitialInfo.java
@@ -5,6 +5,7 @@
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
+import lombok.experimental.SuperBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

@@ -14,11 +15,9 @@
* @author fyb
*/
@Data
-@Builder
+@SuperBuilder
@NoArgsConstructor
-@AllArgsConstructor
@Component
@ConfigurationProperties(prefix = "dataset-initial.dataset")
public class DatasetInitialInfo {

/**
19 changes: 19 additions & 0 deletions backend/src/main/java/ai/basic/x1/adapter/api/config/ImageDatasetInitialInfo.java
@@ -0,0 +1,19 @@
package ai.basic.x1.adapter.api.config;

import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
* @author fyb
*/
@Data
@SuperBuilder
@NoArgsConstructor
@Component
@ConfigurationProperties(prefix = "dataset-initial.dataset.image")
public class ImageDatasetInitialInfo extends DatasetInitialInfo{

}
81 changes: 65 additions & 16 deletions backend/src/main/java/ai/basic/x1/adapter/api/config/JobConfig.java
@@ -1,9 +1,6 @@
package ai.basic.x1.adapter.api.config;

-import ai.basic.x1.adapter.api.job.ModelJobConsumerListener;
-import ai.basic.x1.adapter.api.job.ModelRunErrorHandler;
-import ai.basic.x1.adapter.api.job.PreLabelModelMessageHandler;
-import ai.basic.x1.adapter.api.job.PredictImageCo80ModelHandler;
+import ai.basic.x1.adapter.api.job.*;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -34,12 +31,25 @@
public class JobConfig {
private static final int PROCESSORS = Runtime.getRuntime().availableProcessors();
@Bean
-    public Executor redisStreamExecutor() {
+    public Executor dataRedisStreamExecutor() {
AtomicInteger index = new AtomicInteger(1);
ThreadPoolExecutor executor = new ThreadPoolExecutor(PROCESSORS, PROCESSORS, 0, TimeUnit.SECONDS,
new LinkedBlockingDeque<>(), r -> {
Thread thread = new Thread(r);
-            thread.setName("redisConsumer-executor" + index.getAndIncrement());
+            thread.setName("dataRedisConsumer-executor" + index.getAndIncrement());
thread.setDaemon(true);
return thread;
});
return executor;
}

+    @Bean
+    public Executor datasetRedisStreamExecutor() {
+        AtomicInteger index = new AtomicInteger(1);
+        ThreadPoolExecutor executor = new ThreadPoolExecutor(PROCESSORS, PROCESSORS, 0, TimeUnit.SECONDS,
+                new LinkedBlockingDeque<>(), r -> {
+            Thread thread = new Thread(r);
+            thread.setName("datasetRedisConsumer-executor" + index.getAndIncrement());
+            thread.setDaemon(true);
+            return thread;
+        });
@@ -60,22 +70,22 @@ public Executor similarityExecutor() {
}

@Bean(initMethod = "start", destroyMethod = "stop")
-    public StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainer(Executor redisStreamExecutor,
+    public StreamMessageListenerContainer<String, ObjectRecord<String, String>> dataStreamMessageListenerContainer(Executor dataRedisStreamExecutor,
RedisConnectionFactory redisConnectionFactory,
RedisTemplate redisTemplate,
ApplicationContext applicationContext
) {

try {
-            redisTemplate.opsForStream().createGroup(MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP);
+            redisTemplate.opsForStream().createGroup(DATA_MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP);
} catch (RedisSystemException redisSystemException) {
//no do
}
StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, String>> options =
StreamMessageListenerContainer.StreamMessageListenerContainerOptions
.builder()
.batchSize(10)
-                        .executor(redisStreamExecutor)
+                        .executor(dataRedisStreamExecutor)
.keySerializer(RedisSerializer.string())
.hashKeySerializer(RedisSerializer.string())
.hashValueSerializer(RedisSerializer.string())
@@ -87,24 +97,63 @@ public StreamMessageListenerContainer<String, ObjectRecord<String, String>> stre
.build();
StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainer =
StreamMessageListenerContainer.create(redisConnectionFactory, options);
-        StreamMessageListenerContainer.ConsumerStreamReadRequest<String> streamReadRequest = StreamMessageListenerContainer
+        StreamMessageListenerContainer.ConsumerStreamReadRequest<String> dataStreamReadRequest = StreamMessageListenerContainer
.StreamReadRequest
-                .builder(StreamOffset.create(MODEL_RUN_STREAM_KEY, ReadOffset.lastConsumed()))
+                .builder(StreamOffset.create(DATA_MODEL_RUN_STREAM_KEY, ReadOffset.lastConsumed()))
.consumer(Consumer.from(MODEL_RUN_CONSUMER_GROUP, MODEL_RUN_CONSUMER_NAME))
.autoAcknowledge(false)
.cancelOnError(throwable -> false)
.build();
-        streamMessageListenerContainer.register(streamReadRequest, new ModelJobConsumerListener(MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP, redisTemplate, applicationContext));
+        streamMessageListenerContainer.register(dataStreamReadRequest, new DataModelJobConsumerListener(DATA_MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP, redisTemplate, applicationContext));
return streamMessageListenerContainer;
}

+    @Bean(initMethod = "start", destroyMethod = "stop")
+    public StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainerDataset(Executor datasetRedisStreamExecutor,
+                                                                                                                      RedisConnectionFactory redisConnectionFactory,
+                                                                                                                      RedisTemplate redisTemplate,
+                                                                                                                      ApplicationContext applicationContext
+    ) {
+        try {
+            redisTemplate.opsForStream().createGroup(DATASET_MODEL_RUN_STREAM_KEY, DATASET_MODEL_RUN_CONSUMER_GROUP);
+        } catch (RedisSystemException redisSystemException) {
+            //no do
+        }
+        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, String>> options =
+                StreamMessageListenerContainer.StreamMessageListenerContainerOptions
+                        .builder()
+                        .batchSize(10)
+                        .executor(datasetRedisStreamExecutor)
+                        .keySerializer(RedisSerializer.string())
+                        .hashKeySerializer(RedisSerializer.string())
+                        .hashValueSerializer(RedisSerializer.string())
+                        // less than `spring.redis.timeout`
+                        .pollTimeout(Duration.ofSeconds(1))
+                        .objectMapper(new ObjectHashMapper())
+                        .errorHandler(new ModelRunErrorHandler())
+                        .targetType(String.class)
+                        .build();
+        StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainer =
+                StreamMessageListenerContainer.create(redisConnectionFactory, options);
+
+        StreamMessageListenerContainer.ConsumerStreamReadRequest<String> datasetStreamReadRequest = StreamMessageListenerContainer
+                .StreamReadRequest
+                .builder(StreamOffset.create(DATASET_MODEL_RUN_STREAM_KEY, ReadOffset.lastConsumed()))
+                .consumer(Consumer.from(DATASET_MODEL_RUN_CONSUMER_GROUP, DATASET_MODEL_RUN_CONSUMER_NAME))
+                .autoAcknowledge(false)
+                .cancelOnError(throwable -> false)
+                .build();
+        streamMessageListenerContainer.register(datasetStreamReadRequest, new DatasetModelJobConsumerListener(DATASET_MODEL_RUN_STREAM_KEY, DATASET_MODEL_RUN_CONSUMER_GROUP, redisTemplate, applicationContext));
+        return streamMessageListenerContainer;
+    }

@Bean
-    public PreLabelModelMessageHandler preLabelModelMessageHandler() {
-        return new PreLabelModelMessageHandler();
+    public PointCloudDetectionModelMessageHandler pointCloudDetectionModelMessageHandler() {
+        return new PointCloudDetectionModelMessageHandler();
}

@Bean
-    public PredictImageCo80ModelHandler predictImageCo80ModelHandler() {
-        return new PredictImageCo80ModelHandler();
+    public ImageDetectionModelHandler imageDetectionModelHandler() {
+        return new ImageDetectionModelHandler();
}
}
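The net effect of this change is two independent Redis Stream pipelines, one consuming model runs per data item and one per dataset, each with its own consumer group, listener, and executor pool. One way to verify that both consumer groups exist after startup is sketched below; the stream keys are placeholders, since only the constant names (`DATA_MODEL_RUN_STREAM_KEY`, `DATASET_MODEL_RUN_STREAM_KEY`) appear in this diff, not their configured values:

```bash
# Substitute the actual stream key values used by the deployment.
redis-cli XINFO GROUPS <data-model-run-stream-key>
redis-cli XINFO GROUPS <dataset-model-run-stream-key>
```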
19 changes: 19 additions & 0 deletions backend/src/main/java/ai/basic/x1/adapter/api/config/PointCloudDatasetInitialInfo.java
@@ -0,0 +1,19 @@
package ai.basic.x1.adapter.api.config;

import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
* @author fyb
*/
@Data
@SuperBuilder
@NoArgsConstructor
@Component
@ConfigurationProperties(prefix = "dataset-initial.dataset.point-cloud")
public class PointCloudDatasetInitialInfo extends DatasetInitialInfo{

}
@@ -2,29 +2,35 @@

import ai.basic.x1.adapter.api.annotation.user.LoggedUser;
import ai.basic.x1.adapter.dto.*;
+import ai.basic.x1.adapter.dto.request.DataInfoSplitFilterDTO;
+import ai.basic.x1.adapter.dto.request.DataInfoSplitReqDTO;
import ai.basic.x1.adapter.exception.ApiException;
-import ai.basic.x1.entity.DataInfoQueryBO;
-import ai.basic.x1.entity.DataInfoUploadBO;
-import ai.basic.x1.entity.DataPreAnnotationBO;
-import ai.basic.x1.entity.ScenarioQueryBO;
+import ai.basic.x1.adapter.port.rpc.dto.DatasetModelResultDTO;
+import ai.basic.x1.entity.*;
import ai.basic.x1.entity.enums.ModelCodeEnum;
import ai.basic.x1.entity.enums.ScenarioQuerySourceEnum;
+import ai.basic.x1.entity.enums.SplitTargetDataTypeEnum;
+import ai.basic.x1.entity.enums.SplitTypeEnum;
import ai.basic.x1.usecase.*;
import ai.basic.x1.usecase.exception.UsecaseCode;
import ai.basic.x1.util.DefaultConverter;
import ai.basic.x1.util.ModelParamUtils;
import ai.basic.x1.util.Page;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.EnumUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
+import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;

import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -36,6 +42,7 @@
@RestController
@RequestMapping("/data/")
@Validated
+@Slf4j
public class DataInfoController extends BaseDatasetController {

@Autowired
@@ -147,9 +154,35 @@ public DataAnnotationRecordDTO findDataIdsByRecordId(@PathVariable Long id, @Log
return DefaultConverter.convert(dataAnnotationRecordBO, DataAnnotationRecordDTO.class);
}

+    @GetMapping("findLockRecordByDatasetId")
+    public List<LockRecordDTO> findLockRecordByDatasetId(@NotNull(message = "datasetId cannot be null") @RequestParam(required = false) Long datasetId) {
+        return DefaultConverter.convert(dataAnnotationRecordUseCase.findLockRecordByDatasetId(datasetId), LockRecordDTO.class);
+    }
+
+    @PostMapping("unLockByLockRecordIds")
+    public void unLockByLockRecordIds(@RequestBody @Validated DataBatchUnlockDTO dataBatchUnlockDTO) {
+        dataAnnotationRecordUseCase.unLockByLockRecordIds(dataBatchUnlockDTO.getLockRecordIds());
+    }
+
+    @PostMapping("split/dataIds")
+    public void splitByDataIds(@RequestBody @Validated DataInfoSplitReqDTO dto) {
+        dataInfoUsecase.splitByDataIds(dto.getDataIds(), EnumUtil.fromString(SplitTypeEnum.class, dto.getSplitType()));
+    }
+
+    @PostMapping("split/filter")
+    public void splitByFilter(@RequestBody @Validated DataInfoSplitFilterDTO dto) {
+        dataInfoUsecase.splitByFilter(DefaultConverter.convert(dto, DataInfoSplitFilterBO.class));
+    }
+
+    @GetMapping("split/totalDataCount")
+    public Long getSplitDataTotalCount(@NotNull(message = "datasetId cannot be null") @RequestParam(required = false) Long datasetId,
+                                       @RequestParam(value = "targetDataType", required = false) SplitTargetDataTypeEnum targetDataType) {
+        return dataInfoUsecase.getSplitDataTotalCount(datasetId, targetDataType);
+    }

@PostMapping("deleteBatch")
public void deleteBatch(@RequestBody @Validated DataInfoDeleteDTO dto) {
-        dataInfoUsecase.deleteBatch(dto.getDatasetId(),dto.getIds());
+        dataInfoUsecase.deleteBatch(dto.getDatasetId(), dto.getIds());
}

@GetMapping("generatePresignedUrl")
@@ -251,4 +284,9 @@ public JSONObject getDataAndResult(@NotNull(message = "cannot be null") @Request
return JSONUtil.parseObj(JSONUtil.toJsonStr(dataInfoUsecase.getDataAndResult(datasetId, dataIds)));
}

+    @GetMapping("getDataModelRunResult/{dataId}")
+    public List<DatasetModelResultDTO> getDataModelRunResult(@PathVariable Long dataId) {
+        return DefaultConverter.convert(dataAnnotationObjectUseCase.getDataModelRunResult(dataId), DatasetModelResultDTO.class);
+    }

}
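Hedged examples of exercising the new endpoints, assuming the controller's `/data/` base path, the default port from the README, and placeholder IDs; any authentication headers a real deployment requires are omitted, and the split type value is illustrative since SplitTypeEnum's constants are not shown in this diff:

```bash
# List active lock records for a dataset.
curl 'http://localhost:8080/data/findLockRecordByDatasetId?datasetId=1'

# Assign two data items to a split (the "TRAINING" value is a guess).
curl -X POST 'http://localhost:8080/data/split/dataIds' \
     -H 'Content-Type: application/json' \
     -d '{"dataIds": [1, 2], "splitType": "TRAINING"}'

# Fetch model run results attached to one data item.
curl 'http://localhost:8080/data/getDataModelRunResult/1'
```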