Compare commits

..

No commits in common. "13d418879426e139081f7e4f121bc46f69151b50" and "93670ee60d464a6e65da17a952369b8566fa240f" have entirely different histories.

3 changed files with 1 addition and 101 deletions

View File

@ -1,17 +0,0 @@
package com.idata.tools.relationcompletion;
import com.idata.tools.relationcompletion.completion.RelationCompletion;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;

/**
 * Entry point for the relation-completion batch job.
 *
 * <p>Boots the Spring context, resolves the {@link RelationCompletion} bean and
 * runs the completion pass once. This is a run-to-completion tool (not a
 * long-lived server), so the application context is closed when the job
 * finishes to release pooled resources and let the JVM exit cleanly.
 */
@SpringBootApplication
public class RelationCompletionApplication {

    /**
     * Boots Spring and executes one relation-completion pass.
     *
     * @param args standard command-line arguments, forwarded to Spring Boot
     */
    public static void main(String[] args) {
        // ConfigurableApplicationContext is AutoCloseable; try-with-resources
        // guarantees the context (and its beans) is shut down even if the
        // completion job throws.
        try (ConfigurableApplicationContext context =
                SpringApplication.run(RelationCompletionApplication.class, args)) {
            RelationCompletion relationCompletion = context.getBean(RelationCompletion.class);
            relationCompletion.relationCompletion();
        }
    }
}

View File

@ -25,7 +25,6 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
/**
@ -77,28 +76,20 @@ public class RelationCompletion {
SessionPool sessionPool = graphCommonService.newPoolConfig();
for (TagEnum tagEnum : TagEnum.values()) {
String tag = tagEnum.getTag();
log.info("开启{}类型的点为起始节点反向关系补全", tag);
List<NebulaMultiMatchJsonResult> idJsonResults =
graphCommonService.executeJson(NebulaUtil.getTagIds(spaceName, tag), NebulaMultiMatchJsonResult.class);
List<String> idLists = NebulaMultiMatchJsonResult.getIdLists(idJsonResults.get(0));
map.put(tag, idLists);
// 遍历边
EdgeEnum[] edgeEnums = EdgeEnum.values();
AtomicLong atomicLong = new AtomicLong(0);
Stream.of(edgeEnums)
.forEach(edgeEnum -> {
log.info("开启{}类型的点, 为起始节点; {}类型的边的反向关系补全, ", tag, edgeEnum.getEdgeType(),
"反向关系为: {}", edgeEnum.getReverseRelation());
CompletableFuture<Void> reverseFuture = CompletableFuture.runAsync(() -> {
String srcTag = edgeEnum.getSrcTag();
String dstTag = edgeEnum.getDstTag();
String edgeType = edgeEnum.getEdgeType();
String reverseRelation = edgeEnum.getReverseRelation();
for (String id : idLists) {
atomicLong.incrementAndGet();
if (atomicLong.get() / 1000 == 0) {
log.info("进行{}类型的点, 为起始节点; {}类型的边的反向关系补全, ", tag, edgeEnum.getEdgeType(),
"反向关系为: {}", edgeEnum.getReverseRelation(), "进度: {}", atomicLong.get(), "/", idLists.size());
}
List<NebulaVertexJsonResult> vertexJsonResults = graphCommonService.executeJsonWithoutLogs(
NebulaUtil.getDstVerticesById(spaceName, srcTag, id, edgeType),
NebulaVertexJsonResult.class,
@ -110,11 +101,8 @@ public class RelationCompletion {
}
}, threadPoolExecutor);
reverseFuture.join();
atomicLong.set(0);
});
}
sessionPool.release();
threadPoolExecutor.shutdown();
}
/**

View File

@ -1,71 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- scan为true时如果配置文件发生改变将会进行重新加载 -->
<!-- scanPeriod属性设置监测配置文件修改的时间间隔默认单位为毫秒在scan为true时才生效 -->
<!-- debug:当此属性设置为true时将打印出logback内部日志信息实时查看logback运行状态。默认值为false。 -->
<configuration scan="true" scanPeriod="60 seconds">
<contextName>logback</contextName>
<property name="log.path" value="./logs" />
<property name="app.name" value="relation-completion"/>
<!-- 彩色日志 -->
<!-- 彩色日志依赖的渲染类 -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- 彩色日志格式 -->
<property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) | %clr(${PID:- }){magenta} | %clr(%15.15t){faint} | %X{TRACE_ID} | %clr(%-40.40logger{39}:%-4.4line){cyan} | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<!-- 日志格式 -->
<property name="LOG_PATTERN" value="${LOG_PATTERN:-%d{yyyy-MM-dd HH:mm:ss.SSS} | ${LOG_LEVEL_PATTERN:-%5p} | ${PID:- } | %15.15t | %X{TRACE_ID} |%-40.40logger{39}:%-4.4line | %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<!--输出到控制台-->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!--输出到文件,并按日期归档-->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/${app.name}.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM-dd}/${app.name}.%i.log</fileNamePattern>
<!-- 日志文件最大限制 -->
<maxFileSize>16MB</maxFileSize>
<!-- 日志保留天数 -->
<maxHistory>30</maxHistory>
<totalSizeCap>512MB</totalSizeCap>
</rollingPolicy>
<encoder>
<pattern>${LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!--输出错误日志文件-->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM-dd}/error.%i.log</fileNamePattern>
<maxFileSize>16MB</maxFileSize>
<maxHistory>30</maxHistory>
<totalSizeCap>512MB</totalSizeCap>
</rollingPolicy>
<encoder>
<pattern>${LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
<!--过滤掉所有低于该级别的日志-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
</appender>
<root level="INFO">
<appender-ref ref="CONSOLE" />
<appender-ref ref="FILE" />
<appender-ref ref="ERROR_FILE"/>
</root>
</configuration>