private SqlSession openSessionFromDataSource(ExecutorType execType, TransactionIsolationLevel level, boolean autoCommit) {
  Transaction tx = null;
  try {
    // the Environment was loaded by XMLConfigBuilder#environmentsElement
    final Environment environment = configuration.getEnvironment();
    final TransactionFactory transactionFactory = getTransactionFactoryFromEnvironment(environment);
    // create a new transaction
    tx = transactionFactory.newTransaction(environment.getDataSource(), level, autoCommit);
    final Executor executor = configuration.newExecutor(tx, execType);
    return new DefaultSqlSession(configuration, executor, autoCommit);
  } catch (Exception e) {
    closeTransaction(tx); // may have fetched a connection so lets call close()
    throw ExceptionFactory.wrapException("Error opening session.  Cause: " + e, e);
  } finally {
    ErrorContext.instance().reset();
  }
}
Configuration#newExecutor creates the appropriate Executor:
public Executor newExecutor(Transaction transaction, ExecutorType executorType) {
  executorType = executorType == null ? defaultExecutorType : executorType;
  executorType = executorType == null ? ExecutorType.SIMPLE : executorType;
  Executor executor;
  if (ExecutorType.BATCH == executorType) {
    executor = new BatchExecutor(this, transaction);
  } else if (ExecutorType.REUSE == executorType) {
    executor = new ReuseExecutor(this, transaction);
  } else {
    executor = new SimpleExecutor(this, transaction);
  }
  // caching: the Executor hierarchy and its subclasses are analyzed later; here the
  // SimpleExecutor (or other) is wrapped in a CachingExecutor
  if (cacheEnabled) {
    executor = new CachingExecutor(executor);
  }
  // invoke the interceptors registered in the interceptor chain (those whose intercept
  // type is Executor); each interceptor wraps the current Executor in a dynamic proxy,
  // so n interceptors intercepting Executor produce n layers of proxies around it
  // (analyzed in detail later)
  executor = (Executor) interceptorChain.pluginAll(executor);
  return executor;
}
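The executorType decided above comes from the openSession overloads. A minimal sketch, assuming MyBatis on the classpath and a SqlSessionFactory built elsewhere (the factory parameter and method name are illustrative):

import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class ExecutorTypeDemo {

    // Open sessions backed by the three Executor implementations above.
    static void demo(SqlSessionFactory factory) {
        // SIMPLE (the default): a new Statement for every query
        try (SqlSession simple = factory.openSession()) {
            // ... use simple
        }
        // REUSE: prepared statements are cached and reused within the session
        try (SqlSession reuse = factory.openSession(ExecutorType.REUSE)) {
            // ... use reuse
        }
        // BATCH: updates are queued and flushed as JDBC batches
        try (SqlSession batch = factory.openSession(ExecutorType.BATCH)) {
            // ... use batch
        }
    }
}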
Once the SqlSession has been constructed, selectOne is executed:
@Override
public <T> T selectOne(String statement, Object parameter) {
  // Popular vote was to return null on 0 results and throw exception on too many.
  List<T> list = this.selectList(statement, parameter);
  if (list.size() == 1) {
    return list.get(0);
  } else if (list.size() > 1) {
    throw new TooManyResultsException("Expected one result (or null) to be returned by selectOne(), but found: " + list.size());
  } else {
    return null;
  }
}
@Override
public <E> List<E> selectList(String statement, Object parameter) {
  return this.selectList(statement, parameter, RowBounds.DEFAULT);
}
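Seen from the caller's side, the chain above looks like this; a small sketch in which the statement id "UserMapper.selectById" and the User class are made-up placeholders, not part of the source being analyzed:

import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class SelectOneDemo {

    // Hypothetical result type mapped by the statement below.
    public static class User {
        private Long id;
        private String name;
        public Long getId() { return id; }
        public String getName() { return name; }
    }

    static User findById(SqlSessionFactory factory, long id) {
        try (SqlSession session = factory.openSession()) {
            // selectOne delegates to selectList and expects 0 or 1 rows back
            return session.selectOne("UserMapper.selectById", id);
        }
    }
}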
selectList eventually hands the statement to its SqlSource; for dynamic SQL that work happens inside DynamicContext. Excerpt from DynamicContext:

public static final String PARAMETER_OBJECT_KEY = "_parameter";
public static final String DATABASE_ID_KEY = "_databaseId";
static {
  // initialize OgnlRuntime to use ContextAccessor as the property accessor for ContextMap
  OgnlRuntime.setPropertyAccessor(ContextMap.class, new ContextAccessor());
}
private final ContextMap bindings;
private final StringBuilder sqlBuilder = new StringBuilder();
private int uniqueNumber = 0;

// used when the current parameter is an entity object, a subclass of List, etc. (anything that is not a Map)
public DynamicContext(Configuration configuration, Object parameterObject) {
  // the current parameter is not an instance of Map
  if (parameterObject != null && !(parameterObject instanceof Map)) {
    MetaObject metaObject = configuration.newMetaObject(parameterObject);
    bindings = new ContextMap(metaObject);
  } else {
    // null, or already a Map
    bindings = new ContextMap(null);
  }
  // store the parameter object under the key "_parameter"
  bindings.put(PARAMETER_OBJECT_KEY, parameterObject);
  bindings.put(DATABASE_ID_KEY, configuration.getDatabaseId());
}
public Map getBindings() { return bindings; }
public void bind(String name, Object value) { bindings.put(name, value); }
public void appendSql(String sql) { sqlBuilder.append(sql); sqlBuilder.append(" "); }
public String getSql() { return sqlBuilder.toString().trim(); }
public int getUniqueNumber() { return uniqueNumber++; }
static class ContextMap extends HashMap<String, Object> {
  private static final long serialVersionUID = 2977601501966151582L;
  // ... (remainder of ContextMap omitted; its get(Object key) falls back to the wrapped MetaObject)
The MetaObject handed to ContextMap wraps the parameter object with the usual reflection helpers. Fields of MetaObject:

private final Object originalObject;
private final ObjectWrapper objectWrapper;
private final ObjectFactory objectFactory;
private final ObjectWrapperFactory objectWrapperFactory;
private final ReflectorFactory reflectorFactory;
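The ContextMap/MetaObject chain can be observed directly; a minimal sketch, assuming MyBatis on the classpath (the User class is made up for illustration):

import org.apache.ibatis.scripting.xmltags.DynamicContext;
import org.apache.ibatis.session.Configuration;

public class DynamicContextDemo {

    // A plain (non-Map) parameter object, so DynamicContext wraps it in a MetaObject.
    public static class User {
        private final String name;
        public User(String name) { this.name = name; }
        public String getName() { return name; }
    }

    public static void main(String[] args) {
        Configuration configuration = new Configuration();
        DynamicContext context = new DynamicContext(configuration, new User("mybatis"));

        // "_parameter" is stored directly in the ContextMap
        System.out.println(context.getBindings().get("_parameter")); // the User instance

        // "name" is not a key in the map, so ContextMap falls back to
        // MetaObject.getValue("name") on the wrapped parameter object
        System.out.println(context.getBindings().get("name"));       // mybatis

        // SqlNode.apply implementations assemble the statement through appendSql
        context.appendSql("select * from user");
        context.appendSql("where name = #{name}");
        System.out.println(context.getSql()); // select * from user where name = #{name}
    }
}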
The <if> tag is represented by IfSqlNode:

public class IfSqlNode implements SqlNode {
  private final ExpressionEvaluator evaluator;
  private final String test;
  // IfSqlNode also holds contents, which may be a MixedSqlNode or a StaticTextSqlNode
  private final SqlNode contents;
  public IfSqlNode(SqlNode contents, String test) {
    this.test = test;
    this.contents = contents;
    this.evaluator = new ExpressionEvaluator();
  }
  @Override
  public boolean apply(DynamicContext context) {
    // Evaluate the "test" expression. DynamicContext.ContextAccessor#getProperty calls
    // ContextMap#get, which in turn calls MetaObject#getValue, finally returning the
    // property value of the parameter object that was passed in.
    if (evaluator.evaluateBoolean(test, context.getBindings())) {
      contents.apply(context);
      return true;
    }
    return false;
  }
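Applied by hand, an <if> node behaves as follows; a minimal sketch (the table, column and parameter values are made up):

import org.apache.ibatis.scripting.xmltags.DynamicContext;
import org.apache.ibatis.scripting.xmltags.IfSqlNode;
import org.apache.ibatis.scripting.xmltags.StaticTextSqlNode;
import org.apache.ibatis.session.Configuration;

import java.util.HashMap;
import java.util.Map;

public class IfSqlNodeDemo {

    public static void main(String[] args) {
        Configuration configuration = new Configuration();

        // Equivalent of <if test="name != null"> AND name = #{name} </if>
        IfSqlNode ifNode = new IfSqlNode(new StaticTextSqlNode("AND name = #{name}"), "name != null");

        // A Map plays the role of the bound parameter object
        Map<String, Object> param = new HashMap<>();
        param.put("name", "mybatis");

        DynamicContext context = new DynamicContext(configuration, param);
        context.appendSql("select * from user where 1 = 1");
        ifNode.apply(context); // the test evaluates to true, so the contents are appended
        System.out.println(context.getSql());
        // -> select * from user where 1 = 1 AND name = #{name}
    }
}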
After the SqlNode tree has been applied, SqlSourceBuilder parses the remaining #{} placeholders; each one is handed to ParameterMappingTokenHandler#buildParameterMapping:

private ParameterMapping buildParameterMapping(String content) {
  // ParameterExpression#parse breaks content down into propertiesMap: the "property"
  // entry holds the value inside #{} (e.g. property -> id), plus any further attributes
  // such as the jdbcType mapping
  Map<String, String> propertiesMap = parseParameterMapping(content);
  String property = propertiesMap.get("property");
  Class<?> propertyType;
  if (metaParameters.hasGetter(property)) { // issue #448 get type from additional params
    propertyType = metaParameters.getGetterType(property);
  } else if (typeHandlerRegistry.hasTypeHandler(parameterType)) {
    propertyType = parameterType;
  } else if (JdbcType.CURSOR.name().equals(propertiesMap.get("jdbcType"))) {
    propertyType = java.sql.ResultSet.class;
  } else if (property == null || Map.class.isAssignableFrom(parameterType)) {
    propertyType = Object.class;
  } else {
    // look for a matching property on the parameter type; if one exists, propertyType
    // is set to that property's type
    MetaClass metaClass = MetaClass.forClass(parameterType, configuration.getReflectorFactory());
    if (metaClass.hasGetter(property)) {
      propertyType = metaClass.getGetterType(property);
    } else {
      propertyType = Object.class;
    }
  }
  ParameterMapping.Builder builder = new ParameterMapping.Builder(configuration, property, propertyType);
  Class<?> javaType = propertyType;
  String typeHandlerAlias = null;
  // check for explicit javaType/jdbcType (and other) attributes in the expression
  for (Map.Entry<String, String> entry : propertiesMap.entrySet()) {
    String name = entry.getKey();
    String value = entry.getValue();
    if ("javaType".equals(name)) {
      javaType = resolveClass(value);
      builder.javaType(javaType);
    } else if ("jdbcType".equals(name)) {
      builder.jdbcType(resolveJdbcType(value));
    } else if ("mode".equals(name)) {
      builder.mode(resolveParameterMode(value));
    } else if ("numericScale".equals(name)) {
      builder.numericScale(Integer.valueOf(value));
    } else if ("resultMap".equals(name)) {
      builder.resultMapId(value);
    } else if ("typeHandler".equals(name)) {
      typeHandlerAlias = value;
    } else if ("jdbcTypeName".equals(name)) {
      builder.jdbcTypeName(value);
    } else if ("property".equals(name)) {
      // Do Nothing
    } else if ("expression".equals(name)) {
      throw new BuilderException("Expression based parameters are not supported yet");
    } else {
      throw new BuilderException("An invalid property '" + name + "' was found in mapping #{" + content + "}.  Valid properties are " + parameterProperties);
    }
  }
  if (typeHandlerAlias != null) {
    // if a typeHandler alias was given, set the corresponding type handler on the mapping
    builder.typeHandler(resolveTypeHandler(javaType, typeHandlerAlias));
  }
  // build the ParameterMapping
  return builder.build();
}
  private Map<String, String> parseParameterMapping(String content) {
    try {
      return new ParameterExpression(content);
    } catch (BuilderException ex) {
      throw ex;
    } catch (Exception ex) {
      throw new BuilderException("Parsing error was found in mapping #{" + content + "}.  Check syntax #{property|(expression), var1=value1, var2=value2, ...} ", ex);
    }
  }
}
}
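What parseParameterMapping gets back can be checked in isolation; a small sketch of ParameterExpression parsing the text inside #{...} (the attribute values are made up):

import org.apache.ibatis.builder.ParameterExpression;

import java.util.Map;

public class ParameterExpressionDemo {

    public static void main(String[] args) {
        // The text inside #{...} is handed to ParameterExpression by parseParameterMapping
        Map<String, String> props = new ParameterExpression("id, jdbcType=BIGINT, javaType=long");

        System.out.println(props.get("property")); // id
        System.out.println(props.get("jdbcType")); // BIGINT
        System.out.println(props.get("javaType")); // long
    }
}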
ParameterMapping://TODO
Execution then continues in SqlSourceBuilder#parse, which produces a StaticSqlSource:
new StaticSqlSource(configuration, sql, handler.getParameterMappings());
public class StaticSqlSource implements SqlSource {
private final String sql;
private final List<ParameterMapping> parameterMappings;
private final Configuration configuration;
public StaticSqlSource(Configuration configuration, String sql) { this(configuration, sql, null); }
public StaticSqlSource(Configuration configuration, String sql, List<ParameterMapping> parameterMappings) {
  this.sql = sql;
  this.parameterMappings = parameterMappings;
  this.configuration = configuration;
}
@Override public BoundSql getBoundSql(Object parameterObject) { return new BoundSql(configuration, sql, parameterMappings, parameterObject); }
}
BoundSql:
public class BoundSql {
private final String sql;
private final List<ParameterMapping> parameterMappings;
private final Object parameterObject;
private final Map<String, Object> additionalParameters;
private final MetaObject metaParameters;
public BoundSql(Configuration configuration, String sql, List<ParameterMapping> parameterMappings, Object parameterObject) {
  this.sql = sql;
  this.parameterMappings = parameterMappings;
  this.parameterObject = parameterObject;
  this.additionalParameters = new HashMap<>();
  this.metaParameters = configuration.newMetaObject(additionalParameters);
}
public String getSql() { return sql; }
public List<ParameterMapping> getParameterMappings() { return parameterMappings; }
public Object getParameterObject() { return parameterObject; }
public boolean hasAdditionalParameter(String name) { String paramName = new PropertyTokenizer(name).getName(); return additionalParameters.containsKey(paramName); }
public void setAdditionalParameter(String name, Object value) { metaParameters.setValue(name, value); }
public Object getAdditionalParameter(String name) { return metaParameters.getValue(name); } }
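In practice the BoundSql is usually obtained through the MappedStatement; a minimal sketch, where "UserMapper.selectById" is an assumed statement id from some mapper XML, not part of the source above:

import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.ParameterMapping;
import org.apache.ibatis.session.Configuration;

public class BoundSqlDemo {

    // Inspect the BoundSql produced for a given statement and parameter.
    static void inspect(Configuration configuration, Object parameter) {
        MappedStatement ms = configuration.getMappedStatement("UserMapper.selectById");
        BoundSql boundSql = ms.getBoundSql(parameter); // delegates to the statement's SqlSource

        System.out.println(boundSql.getSql()); // SQL with #{} already replaced by ?
        for (ParameterMapping mapping : boundSql.getParameterMappings()) {
            System.out.println(mapping.getProperty() + " -> " + mapping.getJavaType());
        }
    }
}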
The SqlNode interface: each implementation represents one SQL fragment. There are two kinds: dynamic nodes such as the ones backing <if>, <where> and so on, and static nodes representing SQL text that contains no expressions. The interface has a single method that takes a DynamicContext; on each call the node decides whether to append its fragment to the end of the context, so applying the whole tree assembles the complete statement. The various SqlNode instances are created in XMLScriptBuilder#parseDynamicTags and collected into a MixedSqlNode; once the rootSqlNode held by the statement's DynamicSqlSource has been applied, the node tree has done its job.
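The contract is small; a deliberately trivial implementation, roughly what StaticTextSqlNode does (FixedTextSqlNode is a made-up name, not a MyBatis class):

import org.apache.ibatis.scripting.xmltags.DynamicContext;
import org.apache.ibatis.scripting.xmltags.SqlNode;

// A minimal SqlNode: it just appends a fixed fragment to the context.
public class FixedTextSqlNode implements SqlNode {

    private final String text;

    public FixedTextSqlNode(String text) {
        this.text = text;
    }

    @Override
    public boolean apply(DynamicContext context) {
        // Each node contributes its piece; the context accumulates the full statement.
        context.appendSql(text);
        return true;
    }
}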
ParameterMapping: holds the property name, type and other attributes of the parameter referenced inside a #{} placeholder. It is created at execution time by ParameterMappingTokenHandler#buildParameterMapping, or by MapperBuilderAssistant#buildParameterMapping, and is attached to the SqlSource.
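A ParameterMapping can also be assembled by hand with the same Builder used in buildParameterMapping above; a small sketch (the property name and types are chosen for illustration):

import org.apache.ibatis.mapping.ParameterMapping;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.JdbcType;

public class ParameterMappingDemo {

    public static void main(String[] args) {
        Configuration configuration = new Configuration();

        // Roughly what buildParameterMapping ends up with for #{id, jdbcType=BIGINT}
        ParameterMapping mapping = new ParameterMapping.Builder(configuration, "id", Long.class)
                .jdbcType(JdbcType.BIGINT)
                .build();

        System.out.println(mapping.getProperty()); // id
        System.out.println(mapping.getJdbcType()); // BIGINT
    }
}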
SqlSource: a dynamic SqlSource holds SqlNode instances, while a static SqlSource holds the parsed SQL together with its ParameterMapping list. DynamicSqlSource is created during the parsing phase and, on every execution, applies its node tree to produce a StaticSqlSource whose getBoundSql supplies the final BoundSql used at execution time.
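Tying the pieces together, a DynamicSqlSource can be driven end to end by hand; a minimal sketch (table, column and parameter values are made up):

import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.scripting.xmltags.DynamicSqlSource;
import org.apache.ibatis.scripting.xmltags.IfSqlNode;
import org.apache.ibatis.scripting.xmltags.MixedSqlNode;
import org.apache.ibatis.scripting.xmltags.SqlNode;
import org.apache.ibatis.scripting.xmltags.StaticTextSqlNode;
import org.apache.ibatis.session.Configuration;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class DynamicSqlSourceDemo {

    public static void main(String[] args) {
        Configuration configuration = new Configuration();

        // Equivalent of: select * from user where 1 = 1 <if test="name != null"> AND name = #{name} </if>
        SqlNode root = new MixedSqlNode(Arrays.asList(
                new StaticTextSqlNode("select * from user where 1 = 1"),
                new IfSqlNode(new StaticTextSqlNode("AND name = #{name}"), "name != null")));

        DynamicSqlSource sqlSource = new DynamicSqlSource(configuration, root);

        Map<String, Object> param = new HashMap<>();
        param.put("name", "mybatis");

        // apply() runs over the SqlNode tree, then SqlSourceBuilder turns #{name} into ?
        // and records a ParameterMapping; the result is wrapped in a StaticSqlSource/BoundSql
        BoundSql boundSql = sqlSource.getBoundSql(param);
        System.out.println(boundSql.getSql());
        // -> select * from user where 1 = 1 AND name = ?
        System.out.println(boundSql.getParameterMappings().get(0).getProperty()); // name
    }
}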