package com.zhonglai.luhui.data.file.service.service;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.QueryApi;
import com.influxdb.client.WriteApiBlocking;
import com.influxdb.client.domain.Bucket;
import com.influxdb.client.domain.DeletePredicateRequest;
import com.influxdb.client.domain.Query;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.GsonConstructor;
import com.zhonglai.dao.BaseDao;
import com.zhonglai.luhui.data.file.service.dto.DeviceSensorData;
import com.zhonglai.luhui.data.file.service.util.InfluxDBFluxExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InfluxDB2Service {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private static BaseDao baseDao = new BaseDao(new DataDBFactoryImp());

    // InfluxDB 2.x connection settings
    private static final String token = "YjJgRuCDnypQV4pHlzoixvdoiv237ybVvZ8zzOBfLdbXPbzmYYRi2uWGzXONqqLllhVq3wm03lOF2pl0e3uQHQ==";
    private static final String orgID = "dfed6796541746a2";
    private static final String org = "luhui";
    private static final String url = "http://192.168.31.133:8086";

    // Cache of bucket name -> bucket ID for the organization
    private Map<String, String> bucketMap = new HashMap<>();

    // Instance initializer: pre-load the bucket name -> ID cache for the organization
    {
        InfluxDBClient influxDBClient = connect();
        List<Bucket> list = influxDBClient.getBucketsApi().findBucketsByOrgName(org);
        close(influxDBClient);
        if (null != list && !list.isEmpty()) {
            for (Bucket bucket : list) {
                bucketMap.put(bucket.getName(), bucket.getId());
            }
        }
    }

    /**
     * Establish a connection with the default client settings.
     * @return InfluxDB client
     */
    private InfluxDBClient connect() {
        return connect(null);
    }

    /**
     * Establish a connection, optionally overriding the write timeout.
     * @param writeTimeOut write timeout in milliseconds, or null for the default
     * @return InfluxDB client
     */
    private InfluxDBClient connect(Integer writeTimeOut) {
        String connectUrl = url;
        if (null != writeTimeOut) {
            connectUrl = url + "?writeTimeout=" + writeTimeOut;
        }
        return InfluxDBClientFactory.create(connectUrl, token.toCharArray());
    }

    /**
     * Close the connection.
     * @param client the client to close, may be null
     */
    private void close(InfluxDBClient client) {
        if (null != client) {
            client.close();
        }
    }

    /**
     * Check whether the bucket exists and create it if it does not.
     * @param bucketName bucket name
     * @return true once the bucket exists
     */
    private boolean checkAndCreateBucket(String bucketName) {
        if (bucketMap.containsKey(bucketName)) {
            return true;
        }
        InfluxDBClient client = connect();
        Bucket bucket = client.getBucketsApi().findBucketByName(bucketName);
        if (null == bucket) {
            // No retention rules are set, so data in this bucket is kept forever
            bucket = client.getBucketsApi().createBucket(bucketName, orgID);
            logger.info("Bucket created: " + bucket.getName());
        }
        close(client);
        bucketMap.put(bucketName, bucket.getId());
        return true;
    }

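    /**
     * Convert a DeviceSensorData row into an InfluxDB Point: the prefix of the
     * device_info_id becomes the measurement, the remainder becomes the "sn" tag,
     * numeric values are written as double fields and everything else as strings.
     */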
    private Point deviceSensorDataToPoint(DeviceSensorData deviceSensorData) {
        String[] separationstr = separationDeviceInfoId(deviceSensorData.getDevice_info_id());

        Point point = Point.measurement(separationstr[0])
                .addTag("sn", separationstr[1])
                .addTag("type", deviceSensorData.getData_type())
                .time(deviceSensorData.getCreat_time(), WritePrecision.S);

        if (isInteger(deviceSensorData.getData_value()) || isDecimal(deviceSensorData.getData_value())) {
            // Numeric values are stored as doubles so the field keeps a single type
            point.addField("value", Double.parseDouble(deviceSensorData.getData_value()));
        } else {
            point.addField("value", deviceSensorData.getData_value());
        }

        return point;
    }

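    /**
     * Split a device_info_id of the form "<measurement>_<sn>" at the first underscore;
     * if there is no underscore, the whole id is used as the measurement and the tag is empty.
     */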
    private String[] separationDeviceInfoId(String deviceInfoId) {
        int i = deviceInfoId.indexOf("_");

        String measurementName;
        String baseTag = "";
        if (i > 0) {
            measurementName = deviceInfoId.substring(0, i);
            baseTag = deviceInfoId.substring(i + 1);
        } else {
            measurementName = deviceInfoId;
        }

        return new String[]{measurementName, baseTag};
    }

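    /** @return true if the input is a (signed) integer literal */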
    public static boolean isInteger(String input) {
        Pattern pattern = Pattern.compile("^[-+]?\\d+$");
        Matcher matcher = pattern.matcher(input);
        return matcher.matches();
    }

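    /** @return true if the input is a (signed) decimal literal with a fractional part */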
    public static boolean isDecimal(String input) {
        Pattern pattern = Pattern.compile("^[-+]?\\d+\\.\\d+$");
        Matcher matcher = pattern.matcher(input);
        return matcher.matches();
    }

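    /**
     * Write a batch of sensor data into the given bucket using the blocking write API.
     * @param bucket   target bucket (created beforehand, e.g. via checkAndCreateBucket)
     * @param dataList rows to write; ignored when null or empty
     */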
    public void writeData(String bucket, List<DeviceSensorData> dataList) {
        // Nothing to do for a null or empty batch
        if (null == dataList || dataList.isEmpty()) {
            return;
        }
        InfluxDBClient influxDBClient = connect(60000);
        WriteApiBlocking writeApi = influxDBClient.getWriteApiBlocking();

        List<Point> saveList = new ArrayList<>();
        for (DeviceSensorData deviceSensorData : dataList) {
            saveList.add(deviceSensorDataToPoint(deviceSensorData));
        }
        writeApi.writePoints(bucket, orgID, saveList);
        close(influxDBClient);
    }

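    /**
     * Migrate one MySQL day table into InfluxDB: one bucket per device_model,
     * reading the source rows page by page and writing each page as a batch.
     */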
    private void mysqlToInfluxDB(String databaseName, String tableName) {
        logger.info("Start time: " + LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));

        long time = System.currentTimeMillis();
        List<Map<String, Object>> device_modellist = baseDao.findBysql("SELECT DISTINCT device_model FROM " + databaseName + "." + tableName);
        logger.info("Querying device models took " + (System.currentTimeMillis() - time) / 1000 + "s");

        if (null != device_modellist && !device_modellist.isEmpty()) {
            for (Map<String, Object> map : device_modellist) {
                String device_model = map.get("device_model") + "";
                checkAndCreateBucket(device_model);

                String sql = "select * from " + databaseName + "." + tableName + " where device_model='" + device_model + "'";

                int pageNo = 1;
                int pageSize = 10000;
                List<DeviceSensorData> list;
                do {
                    time = System.currentTimeMillis();
                    list = baseDao.findBysql(DeviceSensorData.class, sql + getLimit(pageNo++, pageSize));
                    writeData(device_model, list);
                    logger.info("Processing page " + (pageNo - 1) + " took " + (System.currentTimeMillis() - time) / 1000 + "s");
                } while (null != list && list.size() > 0);
            }
        }
        logger.info("End time: " + LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")));
    }

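    /**
     * Synchronise MySQL sensor data into InfluxDB day by day, starting from the
     * given day (format yyyyMMdd) up to yesterday.
     */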
    public void synchronousMysqlToInfluxDBByTime(String time) {
        Calendar calendar = Calendar.getInstance();
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");

        try {
            calendar.setTime(simpleDateFormat.parse(time));
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }

        while (System.currentTimeMillis() - calendar.getTime().getTime() >= 86400000) { // at least one day ago
            Integer year = calendar.get(Calendar.YEAR);
            String day = simpleDateFormat.format(calendar.getTime());

            // Source tables are sharded by year (database) and day (table)
            String databaseName = "`ly_sensor_data_" + year + "`";
            String tableName = "`device_sensor_data_" + day + "`";

            logger.info(databaseName + "." + tableName);
            mysqlToInfluxDB(databaseName, tableName);

            calendar.add(Calendar.DATE, 1);
        }
    }

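    /** Build a MySQL LIMIT clause ("limit offset,rows") for the given 1-based page number. */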
    private String getLimit(int pageNo, int pageSize) {
        String limit = " limit " + (pageNo - 1) * pageSize + "," + pageSize;
        logger.info(limit);
        return limit;
    }

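    /**
     * Query raw sensor values for one device and data type in the given Unix-second range.
     * Currently a stub: the result set is queried but not serialised, and an empty string is returned.
     */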
    public String getSenserData(String deviceInfoId, String dataType, Integer startTime, Integer endTime) {
        InfluxDBClient influxDBClient = connect(60000);
        QueryApi queryApi = influxDBClient.getQueryApi();

        String[] separationstr = separationDeviceInfoId(deviceInfoId);

        // TODO: QueryApi in the 2.x client executes Flux; this InfluxQL-style statement still needs to be rewritten
        List<FluxTable> list = queryApi.query("select * from `" + separationstr[0] + "` where sn='" + separationstr[1] + "' and type='" + dataType
                + "' and time>=" + startTime * 1000L * 1000L * 1000L + " and time<" + endTime * 1000L * 1000L * 1000L, org);

        close(influxDBClient);
        return "";
    }

    /**
     * List all buckets.
     * @return buckets visible to the client
     */
    public List<Bucket> findBuckets() {
        InfluxDBClient influxDBClient = connect();
        List<Bucket> buckets = influxDBClient.getBucketsApi().findBuckets();
        close(influxDBClient);
        return buckets;
    }

    /**
     * List all measurements in a bucket.
     * @param bucket bucket name
     * @return query result tables
     */
    public List<FluxTable> findMeasurements(String bucket) {
        InfluxDBClient influxDBClient = connect();
        List<FluxTable> tables = influxDBClient.getQueryApi().query("import \"influxdata/influxdb/v1\"\n"
                + "v1.measurements(bucket:\"" + bucket + "\")", org);
        close(influxDBClient);
        return tables;
    }

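    /**
     * Query a measurement page by page and print each record; limit and offset are
     * applied with the Flux limit() function.
     */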
    public void queryMeasurementDataWithPaging(String bucket, String measurement, int limit, int offset) {
        InfluxDBClient influxDBClient = connect();
        QueryApi queryApi = influxDBClient.getQueryApi();
        // Apply paging via Flux limit(n:, offset:)
        String flux = String.format(
                "from(bucket:\"%s\") |> range(start: -5y) |> filter(fn: (r) => r._measurement == \"%s\") |> limit(n: %d, offset: %d)",
                bucket, measurement, limit, offset);
        List<FluxTable> tables = queryApi.query(flux, org);

        for (FluxTable table : tables) {
            List<FluxRecord> records = table.getRecords();
            for (FluxRecord record : records) {
                System.out.println(record.getTime() + ": " + record.getValue());
            }
        }
        close(influxDBClient);
    }

    /**
     * UTC format with time-zone information.
     */
    public static final String UTC_ZONE_FORMATER = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";

    /**
     * Query data for one measurement in a bucket between two Unix-second timestamps,
     * using the Flux built by InfluxDBFluxExpression, and log each record.
     */
    public void select(String bucketName, String tableName, Integer start, Integer stop) {
        InfluxDBClient influxDBClient = connect();
        StringBuffer stringBuilder = new StringBuffer();
        InfluxDBFluxExpression.appendCommonFlux(stringBuilder, bucketName, tableName,
                Instant.ofEpochMilli(start * 1000L).toString(), Instant.ofEpochMilli(stop * 1000L).toString());
        // InfluxDBFluxExpression.appendTagFlux(stringBuilder, map.get("sn").toString());
        // InfluxDBFluxExpression.appendTimeShiftFlux(stringBuilder);
        logger.info("Query Flux: {}", stringBuilder.toString());

        // Query the data in the given time range
        List<FluxTable> tables = influxDBClient.getQueryApi().query(stringBuilder.toString(), org);
        for (FluxTable table : tables) {
            List<FluxRecord> records = table.getRecords();
            for (FluxRecord record : records) {
                logger.info("{}---{}---{}---{}", record.getMeasurement(), record.getField(), record.getValue(), record.getTime());
            }
        }
        close(influxDBClient);
    }

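    /**
     * Query one measurement in a bucket between two Unix-second timestamps and print each record.
     */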
    public void queryData(String bucket, String tableName, Integer start, Integer stop) {
        InfluxDBClient influxDBClient = connect();
        // Flux filter predicate on the measurement name
        String predicate = "r._measurement == \"" + tableName + "\"";
        QueryApi queryApi = influxDBClient.getQueryApi();
        String query = String.format("from(bucket:\"%s\") " +
                        "|> range(start: %s, stop: %s) " +
                        "|> filter(fn: (r) => %s)",
                bucket,
                OffsetDateTime.ofInstant(Instant.ofEpochMilli(start * 1000L), ZoneOffset.UTC),
                OffsetDateTime.ofInstant(Instant.ofEpochMilli(stop * 1000L), ZoneOffset.UTC),
                predicate);

        logger.info("Query: " + query);
        List<FluxTable> tables = queryApi.query(query, org);

        for (FluxTable table : tables) {
            List<FluxRecord> records = table.getRecords();
            for (FluxRecord record : records) {
                System.out.println(record.getTime() + ": " + record.getValue());
            }
        }
        close(influxDBClient);
    }

    /**
     * Delete data for one measurement in a bucket between two Unix-second timestamps.
     */
    public void delete(String bucketName, String tableName, Integer start, Integer stop) {
        InfluxDBClient influxDBClient = connect();
        // Delete-predicate syntax: _measurement="<name>"
        String predicate = "_measurement=\"" + tableName + "\"";
        influxDBClient.getDeleteApi().delete(
                OffsetDateTime.ofInstant(Instant.ofEpochMilli(start * 1000L), ZoneOffset.UTC),
                OffsetDateTime.ofInstant(Instant.ofEpochMilli(stop * 1000L), ZoneOffset.UTC),
                predicate, bucketName, org);
        close(influxDBClient);
    }

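    // Simple manual entry point for local testing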
    public static void main(String[] args) {
        InfluxDB2Service influxDB2Service = new InfluxDB2Service();
        // influxDB2Service.delete("6_W", "865501049001200", 1580918400, 1581091200);
        influxDB2Service.select("6_W", "865501049001200", 1580745600, 1580832000);
    }

} |