Commit

oceanbase dimension
FlechazoW committed Mar 26, 2020
1 parent 201dd74 commit adcf847
Showing 19 changed files with 570 additions and 32 deletions.
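The hunks visible below add OceanBase as a dimension (side) table plugin: a new oceanbase-all-side Maven module that builds the plugin jar, plus OceanbaseAllReqRow and OceanbaseAllSideInfo, which reuse the shared RDB side-table base classes over the MySQL-protocol driver. The remaining hunks are small cleanups in ExecuteProcessHelper, the side-SQL execution path, WaterMarkerAssigner, and LauncherMain.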

@@ -94,11 +94,9 @@ public class ExecuteProcessHelper {


public static ParamsInfo parseParams(String[] args) throws Exception {
LOG.info("------------program params-------------------------");
System.out.println("------------program params-------------------------");
Arrays.stream(args).forEach(arg -> LOG.info("{}", arg));
Arrays.stream(args).forEach(System.out::println);
LOG.info("-------------------------------------------");
System.out.println("----------------------------------------");

OptionParser optionParser = new OptionParser(args);
@@ -288,9 +286,7 @@ public static Set<URL> registerTable(SqlTree sqlTree, StreamExecutionEnvironment

RowTypeInfo typeInfo = new RowTypeInfo(adaptTable.getSchema().getFieldTypes(), adaptTable.getSchema().getFieldNames());
DataStream adaptStream = tableEnv.toRetractStream(adaptTable, typeInfo)
.map((Tuple2<Boolean, Row> f0) -> {
return f0.f1;
})
.map((Tuple2<Boolean, Row> f0) -> f0.f1)
.returns(typeInfo);

String fields = String.join(",", typeInfo.getFieldNames());

@@ -121,7 +121,7 @@ public void exec(String sql, Map<String, AbstractSideTableInfo> sideTableMap, St
SideSQLParser sideSQLParser = new SideSQLParser();
sideSQLParser.setLocalTableCache(localTableCache);
Queue<Object> exeQueue = sideSQLParser.getExeQueue(sql, sideTableMap.keySet());
Object pollObj = null;
Object pollObj;

//need clean
boolean preIsSideJoin = false;

@@ -41,11 +41,7 @@
public class WaterMarkerAssigner {

public boolean checkNeedAssignWaterMarker(AbstractSourceTableInfo tableInfo){
if(Strings.isNullOrEmpty(tableInfo.getEventTimeField())){
return false;
}

return true;
return !Strings.isNullOrEmpty(tableInfo.getEventTimeField());
}

public DataStream assignWaterMarker(DataStream<Row> dataStream, RowTypeInfo typeInfo, AbstractSourceTableInfo sourceTableInfo){

@@ -48,6 +48,7 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -67,8 +68,7 @@ public class LauncherMain {

private static String getLocalCoreJarPath(String localSqlRootJar) throws Exception {
String jarPath = PluginUtil.getCoreJarFileName(localSqlRootJar, CORE_JAR);
String corePath = localSqlRootJar + SP + jarPath;
return corePath;
return localSqlRootJar + SP + jarPath;
}

public static void main(String[] args) throws Exception {
@@ -85,14 +85,14 @@ public static void main(String[] args) throws Exception {
Properties confProperties = PluginUtil.jsonStrToObject(confProp, Properties.class);

if(mode.equals(ClusterMode.local.name())) {
String[] localArgs = argList.toArray(new String[argList.size()]);
String[] localArgs = argList.toArray(new String[0]);
Main.main(localArgs);
return;
}

String pluginRoot = launcherOptions.getLocalSqlPluginPath();
File jarFile = new File(getLocalCoreJarPath(pluginRoot));
String[] remoteArgs = argList.toArray(new String[argList.size()]);
String[] remoteArgs = argList.toArray(new String[0]);
PackagedProgram program = new PackagedProgram(jarFile, Lists.newArrayList(), remoteArgs);

String savePointPath = confProperties.getProperty(ConfigConstrant.SAVE_POINT_PATH_KEY);
@@ -116,14 +116,14 @@ public static void main(String[] args) throws Exception {

private static String[] parseJson(String[] args) {
BufferedReader reader = null;
String lastStr = "";
StringBuilder lastStr = new StringBuilder();
try{
FileInputStream fileInputStream = new FileInputStream(args[0]);
InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream, "UTF-8");
InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream, StandardCharsets.UTF_8);
reader = new BufferedReader(inputStreamReader);
String tempString = null;
while((tempString = reader.readLine()) != null){
lastStr += tempString;
lastStr.append(tempString);
}
reader.close();
}catch(IOException e){
@@ -137,14 +137,13 @@ private static String[] parseJson(String[] args) {
}
}
}
Map<String, Object> map = JSON.parseObject(lastStr, new TypeReference<Map<String, Object>>(){} );
Map<String, Object> map = JSON.parseObject(lastStr.toString(), new TypeReference<Map<String, Object>>(){} );
List<String> list = new LinkedList<>();

for (Map.Entry<String, Object> entry : map.entrySet()) {
list.add("-" + entry.getKey());
list.add(entry.getValue().toString());
}
String[] array = list.toArray(new String[list.size()]);
return array;
return list.toArray(new String[0]);
}
}
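
For context, parseJson above reads the launcher's JSON options file and flattens it into the "-key value" argument array that main() consumes. A minimal, self-contained sketch of that conversion step (the map contents here are hypothetical stand-ins; the real code obtains the map from fastjson's JSON.parseObject as shown above):

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class ParseJsonSketch {
    public static void main(String[] args) {
        // Hypothetical stand-in for the map produced by JSON.parseObject in parseJson.
        Map<String, Object> map = new LinkedHashMap<>();
        map.put("mode", "local");
        map.put("name", "oceanbase_side_demo");

        // Same flattening loop as in parseJson: each entry becomes "-key" followed by its value.
        List<String> list = new LinkedList<>();
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            list.add("-" + entry.getKey());
            list.add(entry.getValue().toString());
        }

        // Prints: [-mode, local, -name, oceanbase_side_demo]
        System.out.println(list);
    }
}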
94 changes: 94 additions & 0 deletions oceanbase/oceanbase-side/oceanbase-all-side/pom.xml
@@ -0,0 +1,94 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>sql.side.oceanbase</artifactId>
<groupId>com.dtstack.flink</groupId>
<version>1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>sql.side.all.oceanbase</artifactId>
<name>oceanbase-all-side</name>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>

<properties>
<sql.side.oceanbase.core.version>1.0-SNAPSHOT</sql.side.oceanbase.core.version>
</properties>

<dependencies>
<dependency>
<groupId>com.dtstack.flink</groupId>
<artifactId>sql.side.oceanbase.core</artifactId>
<version>${sql.side.oceanbase.core.version}</version>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>1.4</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
<artifactSet>
<excludes>

</excludes>
</artifactSet>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.2</version>
<executions>
<execution>
<id>copy-resources</id>
<!-- here the phase you need -->
<phase>package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<tasks>
<copy todir="${basedir}/../../../plugins/oceanbaseallside">
<fileset dir="target/">
<include name="${project.artifactId}-${project.version}.jar"/>
</fileset>
</copy>

<move file="${basedir}/../../../plugins/oceanbaseallside/${project.artifactId}-${project.version}.jar"
tofile="${basedir}/../../../plugins/oceanbaseallside/${project.name}-${git.branch}.jar"/>
</tasks>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>


</project>
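
When this module is packaged, the shade plugin bundles its dependencies into one jar (dropping signature files), and the antrun step copies that jar into plugins/oceanbaseallside and renames it to ${project.name}-${git.branch}.jar. The git.branch property is presumably supplied by the parent build, and the plugins directory is presumably the local plugin path the launcher resolves via getLocalSqlPluginPath().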
@@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.dtstack.flink.sql.side.oceanbase;

import com.dtstack.flink.sql.side.AbstractSideTableInfo;
import com.dtstack.flink.sql.side.FieldInfo;
import com.dtstack.flink.sql.side.JoinInfo;
import com.dtstack.flink.sql.side.rdb.all.AbstractRdbAllReqRow;
import com.dtstack.flink.sql.util.DtStringUtil;
import com.google.common.collect.Maps;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;
import java.util.Map;

/**
* @author : tiezhu
* @date : 2020/3/26
*/
public class OceanbaseAllReqRow extends AbstractRdbAllReqRow {

private static final Logger LOG = LoggerFactory.getLogger(OceanbaseAllReqRow.class);

private static final String OCEAN_BASE_DRIVER = "com.mysql.jdbc.Driver";

public OceanbaseAllReqRow(RowTypeInfo rowTypeInfo,
JoinInfo joinInfo,
List<FieldInfo> outFieldInfoList,
AbstractSideTableInfo sideTableInfo) {
super(new OceanbaseAllSideInfo(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo));
}

@Override
public Connection getConn(String dbUrl, String userName, String password) {
try {
Class.forName(OCEAN_BASE_DRIVER);
Map<String, String> addParams = Maps.newHashMap();
addParams.put("useCursorFetch", "true");
String targetDbUrl = DtStringUtil.addJdbcParam(dbUrl, addParams, true);
return DriverManager.getConnection(targetDbUrl, userName, password);
} catch (Exception e) {
LOG.error("oceanbase get connect error", e);
throw new RuntimeException(e);
}
}

@Override
public int getFetchSize() {
return Integer.MIN_VALUE;
}
}
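
Two details above follow the MySQL-driver convention for reading a large table without buffering it in memory: the useCursorFetch=true URL parameter and getFetchSize() returning Integer.MIN_VALUE, which AbstractRdbAllReqRow presumably applies to the statement that loads the whole dimension table for the ALL cache. A stand-alone sketch of that streaming-read pattern (URL, credentials, and table name are hypothetical; OceanBase is addressed through the MySQL protocol here):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class StreamingReadSketch {
    public static void main(String[] args) throws Exception {
        Class.forName("com.mysql.jdbc.Driver");
        // Hypothetical connection settings for an OceanBase MySQL-protocol endpoint.
        String url = "jdbc:mysql://127.0.0.1:2881/demo_db?useCursorFetch=true";
        try (Connection conn = DriverManager.getConnection(url, "demo_user", "demo_pwd");
             Statement stmt = conn.createStatement()) {
            // Integer.MIN_VALUE tells the MySQL driver to stream rows one by one
            // instead of materializing the full result set on the client.
            stmt.setFetchSize(Integer.MIN_VALUE);
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM demo_side_table")) {
                while (rs.next()) {
                    // Each row would be added to the side-table cache here.
                }
            }
        }
    }
}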
@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.dtstack.flink.sql.side.oceanbase;

import com.dtstack.flink.sql.side.AbstractSideTableInfo;
import com.dtstack.flink.sql.side.FieldInfo;
import com.dtstack.flink.sql.side.JoinInfo;
import com.dtstack.flink.sql.side.rdb.all.RdbAllSideInfo;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

import java.util.List;

/**
* @author : tiezhu
* @date : 2020/3/26
*/
public class OceanbaseAllSideInfo extends RdbAllSideInfo {
public OceanbaseAllSideInfo(RowTypeInfo rowTypeInfo,
JoinInfo joinInfo,
List<FieldInfo> outFieldInfoList,
AbstractSideTableInfo sideTableInfo) {
super(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo);
}
}