实现SpringModule
import net.hasor.core.ApiBinder;
import net.hasor.core.DimModule;
import net.hasor.core.TypeSupplier;
import net.hasor.dataql.fx.db.LookupDataSourceListener;
import net.hasor.dataway.spi.CompilerSpiListener;
import net.hasor.db.JdbcModule;
import net.hasor.db.Level;
import net.hasor.spring.SpringModule;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.Connection;
import java.util.function.Supplier;
/**
 * Bridges the Spring context into Hasor/Dataway.
 *
 * @author liuyun
 * @date 2021/03/23 14:25
 */
@DimModule
@Component
public class ExampleModule implements SpringModule {
    @Autowired
    private DataSource dataSource;

    /**
     * Invoked by Hasor at startup. Hands the Spring-managed {@link DataSource}
     * over to Hasor's JDBC module and registers the SPI listeners, delegating
     * their instantiation to the Spring container so they receive Spring
     * dependency injection.
     *
     * @param apiBinder the Hasor binder used to install modules and register SPI listeners
     * @throws Throwable if module installation or SPI registration fails
     */
    @Override
    public void loadModule(ApiBinder apiBinder) throws Throwable {
        // Supplier that resolves instances out of the Spring context instead of Hasor.
        final TypeSupplier fromSpring = springTypeSupplier(apiBinder);

        // Register the default DataSource with Hasor.
        apiBinder.installModule(new JdbcModule(Level.Full, this.dataSource));

        // Register the SPI listeners; the explicit Supplier casts pick the
        // supplier-based bindSpiListener overload.
        apiBinder.bindSpiListener(LookupDataSourceListener.class,
                (Supplier<LookupDataSourceListener>) () -> fromSpring.get(BusiDbLookUpSpi.class));
        apiBinder.bindSpiListener(CompilerSpiListener.class,
                (Supplier<CompilerSpiListener>) () -> fromSpring.get(EnhanceDataqlSpi.class));
    }
}
实现LookupDataSourceListener
import com.alibaba.druid.pool.DruidDataSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.sql.DataSource;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author liuyun
* @date 2021/03/24 10:27
*/
/**
 * Resolves named business DataSources for DataQL's
 * {@code FRAGMENT_SQL_DATA_SOURCE} hint. Connection settings are read from
 * the {@code MetaDBCfgDpm} table; built DataSources are cached in-memory.
 *
 * @author liuyun
 * @date 2021/03/24 10:27
 */
@Slf4j
@Component
public class BusiDbLookUpSpi implements LookupDataSourceListener {
    /** Cache of built DataSources keyed by configured db name. */
    private final Map<String, DataSource> busiDbPool = new ConcurrentHashMap<>();

    @Autowired
    private MetaDBCfgDpmService metaDBCfgDpmService;

    /**
     * Pre-builds a DataSource for every configuration row at startup.
     */
    @PostConstruct
    public void init() {
        metaDBCfgDpmService.findAll().forEach(this::createAndCacheDataSource);
    }

    /**
     * Returns the DataSource registered under {@code lookupName}, lazily
     * building and caching it from the configuration table on first use.
     *
     * @param lookupName logical db name from the DataQL hint
     * @return the resolved DataSource
     * @throws BusinessException if no configuration row matches {@code lookupName}
     */
    @Override
    public DataSource lookUp(String lookupName) {
        DataSource busiDb = busiDbPool.get(lookupName);
        if (busiDb == null) {
            List<MetaDBCfgDpmDTO> metaDB = metaDBCfgDpmService.findByName(lookupName);
            if (metaDB.isEmpty()) {
                throw new BusinessException("lookupName [" + lookupName + "] is invalid.");
            }
            busiDb = createAndCacheDataSource(metaDB.get(0));
        }
        return busiDb;
    }

    /**
     * Builds a DataSource from one configuration row (decrypting the stored
     * password) and caches it.
     * NOTE(review): the cache key is the row's dbname, which is assumed to
     * equal the lookup name used in {@link #lookUp} — confirm, otherwise the
     * cache never hits and every lookup re-queries the config table.
     */
    private DataSource createAndCacheDataSource(MetaDBCfgDpmDTO dbCfg) {
        String password = dbCfg.getPassword();
        password = password == null ? null : DesCipher.decrypt(password);
        DataSource ds = buildDataSource(dbCfg.getUrl(), dbCfg.getDriverclassname(),
                dbCfg.getUsername(), password);
        busiDbPool.put(dbCfg.getDbname(), ds);
        return ds;
    }

    /**
     * Dispatches on the driver class name: hive/presto drivers get a plain
     * per-connection DataSource, everything else a pooled Druid DataSource.
     */
    public DataSource buildDataSource(String url, String driverclassname, String username, String password) {
        boolean isHadoop = driverclassname.contains("hive") || driverclassname.contains("presto");
        return isHadoop
                ? buildHadoopDataSource(url, driverclassname, username, password)
                : buildRdbDataSource(url, driverclassname, username, password);
    }

    /**
     * Builds a pooled Druid DataSource for conventional RDBMS targets.
     */
    public DataSource buildRdbDataSource(String url, String driverclassname, String username, String password) {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl(url);
        ds.setDriverClassName(driverclassname);
        ds.setUsername(username);
        ds.setPassword(password);
        ds.setInitialSize(1);
        ds.setMinIdle(1);
        ds.setMaxActive(20);
        ds.setMaxWait(10000);
        // Keep idle connections alive and validate on borrow.
        ds.setValidationQuery("select 1");
        ds.setTestWhileIdle(true);
        ds.setTestOnBorrow(true);
        ds.setTimeBetweenEvictionRunsMillis(60000);
        ds.setMinEvictableIdleTimeMillis(300000);
        return ds;
    }

    /**
     * Builds an unpooled DataSource for hive/presto, which typically do not
     * tolerate long-lived pooled connections.
     *
     * @throws IllegalStateException if the driver class cannot be loaded —
     *         previously this was only logged, leaving a null driver class
     *         that caused an NPE at first connection attempt
     */
    public DataSource buildHadoopDataSource(String url, String driverclassname, String username, String password) {
        Class<? extends java.sql.Driver> driverClass;
        try {
            driverClass = Class.forName(driverclassname).asSubclass(java.sql.Driver.class);
        } catch (Exception e) {
            log.error("驱动类加载失败:" + driverclassname, e);
            throw new IllegalStateException("cannot load driver class: " + driverclassname, e);
        }
        SimpleDriverDataSource ds = new SimpleDriverDataSource();
        ds.setUrl(url);
        ds.setDriverClass(driverClass);
        ds.setUsername(username);
        // Some hadoop drivers reject an empty-string password; normalize to null.
        ds.setPassword("".equals(password) ? null : password);
        return ds;
    }

    /** Evicts a cached DataSource, e.g. after its configuration changed. */
    public void removeDataSourceFromPool(String dbName) {
        busiDbPool.remove(dbName);
    }
}
实现CompilerSpiListener
/**
 * DataQL compile hook: prepends a {@code FRAGMENT_SQL_DATA_SOURCE} hint to
 * the query when the API's metadata row names a business db, so SQL fragments
 * run against that DataSource instead of the default one.
 */
@Slf4j
@Component
public class EnhanceDataqlSpi implements CompilerSpiListener {
    @Autowired
    private MetaObjMapper metaObjMapper;

    /**
     * Looks up the API's metadata and, when a db name is configured, injects
     * the data-source hint before delegating to the default compiler.
     *
     * @param apiInfo the API being compiled
     * @param query   the DataQL query text
     * @param dataQL  the DataQL engine
     * @return the compiled QIL
     * @throws IOException if the default compiler fails
     */
    @Override
    public QIL compiler(ApiInfo apiInfo, String query, DataQL dataQL) throws IOException {
        Long apiId = Long.valueOf(apiInfo.getApiID());
        MetaObj metaObj = metaObjMapper.selectByPrimaryKey(apiId.toString());
        // Guard against a missing metadata row — previously an unknown apiId
        // caused an NPE on metaObj.getDbname().
        if (metaObj != null && metaObj.getDbname() != null) {
            log.info("Found dbId [{}] for apiId[{}].", metaObj.getDbname(), apiId);
            // Prepend a data-source hint to every DataQL query for this API.
            query = "hint FRAGMENT_SQL_DATA_SOURCE='" + metaObj.getDbname() + "'; " + query;
        } else {
            log.info("Not found dbId for apiId[{}], use default dataSource.", apiId);
        }
        return CompilerSpiListener.DEFAULT.compiler(apiInfo, query, dataQL);
    }
}