Merge pull request #48 from nashtsai/master
code tidy up, minor performance improvement
commit 8648f9458e
@@ -590,7 +590,7 @@ func (engine *Engine) mapType(t reflect.Type) *Table {
 			sqlType := Type2SQLType(fieldType)
 			col = &Column{engine.columnMapper.Obj2Table(t.Field(i).Name), t.Field(i).Name, sqlType,
 				sqlType.DefaultLength, sqlType.DefaultLength2, true, "", make(map[string]bool), false, false,
-				TWOSIDES, false, false, false, false}
+				TWOSIDES, false, false, false, false, nil}
 		}
 		if col.IsAutoIncrement {
 			col.Nullable = false
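The only change in this hunk is the trailing nil added to the positional &Column{...} literal: the struct has gained one more field (apparently the fieldPath cache that shows up in getField further down), and a positional literal must list a value for every field. A minimal sketch of why the extra value is needed, using a hypothetical cut-down column type rather than xorm's real one:

package main

import "fmt"

// column is a hypothetical stand-in for xorm's Column; it has just
// gained a new trailing field (fieldPath), so every positional literal
// now needs one more value.
type column struct {
	Name      string
	Nullable  bool
	fieldPath []string
}

func main() {
	// Positional literal: every field must be listed, hence the trailing nil.
	a := column{"id", false, nil}

	// Keyed literal: omitted fields take their zero value (nil for a slice),
	// so adding fields later does not break existing literals.
	b := column{Name: "id"}

	fmt.Println(a.fieldPath == nil, b.fieldPath == nil) // true true
}

Keyed literals age better than positional ones for exactly this reason, but the commit keeps the existing positional style and simply appends the nil.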
|
|
session.go (60 changed lines)
@@ -1092,23 +1092,39 @@ func (session *Session) Find(rowsSlicePtr interface{}, condiBean ...interface{})
 
 		fieldsCount := len(fields)
 
-		for rawRows.Next() {
-			var newValue reflect.Value
-			if sliceElementType.Kind() == reflect.Ptr {
-				newValue = reflect.New(sliceElementType.Elem())
-			} else {
-				newValue = reflect.New(sliceElementType)
-			}
-			err := session.row2Bean(rawRows, fields, fieldsCount, newValue.Interface())
-			if err != nil {
-				return err
-			}
-			if sliceValue.Kind() == reflect.Slice {
-				if sliceElementType.Kind() == reflect.Ptr {
-					sliceValue.Set(reflect.Append(sliceValue, reflect.ValueOf(newValue.Interface())))
-				} else {
-					sliceValue.Set(reflect.Append(sliceValue, reflect.Indirect(reflect.ValueOf(newValue.Interface()))))
-				}
-			}
-		}
+		var newElemFunc func() reflect.Value
+		if sliceElementType.Kind() == reflect.Ptr {
+			newElemFunc = func() reflect.Value {
+				return reflect.New(sliceElementType.Elem())
+			}
+		} else {
+			newElemFunc = func() reflect.Value {
+				return reflect.New(sliceElementType)
+			}
+		}
+
+		var sliceValueSetFunc func(*reflect.Value)
+
+		if sliceValue.Kind() == reflect.Slice {
+			if sliceElementType.Kind() == reflect.Ptr {
+				sliceValueSetFunc = func(newValue *reflect.Value) {
+					sliceValue.Set(reflect.Append(sliceValue, reflect.ValueOf(newValue.Interface())))
+				}
+			} else {
+				sliceValueSetFunc = func(newValue *reflect.Value) {
+					sliceValue.Set(reflect.Append(sliceValue, reflect.Indirect(reflect.ValueOf(newValue.Interface()))))
+				}
+			}
+		}
+
+		for rawRows.Next() {
+			var newValue reflect.Value = newElemFunc()
+			if sliceValueSetFunc != nil {
+				err := session.row2Bean(rawRows, fields, fieldsCount, newValue.Interface())
+				if err != nil {
+					return err
+				}
+				sliceValueSetFunc(&newValue)
+			}
+		}
 	} else {
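This Find hunk is the advertised performance tweak: instead of re-testing sliceElementType.Kind() and sliceValue.Kind() on every row, the element constructor (newElemFunc) and the append step (sliceValueSetFunc) are chosen once as closures before the rawRows.Next() loop, and each row only calls them; when no append function applies, the row is not scanned at all. A standalone sketch of the same hoisting pattern, with illustrative names (fill, newElem, appendElem) that are not part of the xorm API:

package main

import (
	"fmt"
	"reflect"
)

// fill appends `rows` zero-valued elements to the slice that slicePtr
// points at. The constructor and the append step are chosen once as
// closures before the loop, mirroring the per-row branching that the
// Find change hoists out of rawRows.Next().
func fill(slicePtr interface{}, rows int) {
	sliceValue := reflect.Indirect(reflect.ValueOf(slicePtr))
	elemType := sliceValue.Type().Elem()

	// Decide once how to build a new element.
	var newElem func() reflect.Value
	if elemType.Kind() == reflect.Ptr {
		newElem = func() reflect.Value { return reflect.New(elemType.Elem()) }
	} else {
		newElem = func() reflect.Value { return reflect.New(elemType) }
	}

	// Decide once how to append it (pointer element vs. value element).
	var appendElem func(reflect.Value)
	if elemType.Kind() == reflect.Ptr {
		appendElem = func(v reflect.Value) {
			sliceValue.Set(reflect.Append(sliceValue, v))
		}
	} else {
		appendElem = func(v reflect.Value) {
			sliceValue.Set(reflect.Append(sliceValue, reflect.Indirect(v)))
		}
	}

	// The hot loop only invokes the pre-selected closures.
	for i := 0; i < rows; i++ {
		appendElem(newElem())
	}
}

type user struct{ Id int64 }

func main() {
	var byValue []user
	fill(&byValue, 3)

	var byPtr []*user
	fill(&byPtr, 2)

	fmt.Println(len(byValue), len(byPtr)) // 3 2
}

Calling fill against both a []user and a []*user exercises the two append branches while the per-row body stays free of Kind() checks.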
|
@@ -1347,22 +1363,24 @@ func row2map(rows *sql.Rows, fields []string) (resultsMap map[string][]byte, err
 
 func (session *Session) getField(dataStruct *reflect.Value, key string, table *Table) *reflect.Value {
 
-	key = strings.ToLower(key)
-	if _, ok := table.Columns[key]; !ok {
-		session.Engine.LogWarn(fmt.Sprintf("table %v's has not column %v. %v", table.Name, key, table.ColumnsSeq))
-		return nil
-	}
-	col := table.Columns[key]
-	fieldName := col.FieldName
-	fieldPath := strings.Split(fieldName, ".")
-	var fieldValue reflect.Value
-	if len(fieldPath) > 2 {
-		session.Engine.LogError("Unsupported mutliderive", fieldName)
-		return nil
-	} else if len(fieldPath) == 2 {
-		parentField := dataStruct.FieldByName(fieldPath[0])
-		if parentField.IsValid() {
-			fieldValue = parentField.FieldByName(fieldPath[1])
-		}
-	} else {
-		fieldValue = dataStruct.FieldByName(fieldName)
+	//key = strings.ToLower(key)
+	if col, ok := table.Columns[key]; !ok {
+		session.Engine.LogWarn(fmt.Sprintf("table %v's has not column %v. %v", table.Name, key, table.ColumnsSeq))
+		return nil
+	} else {
+		fieldName := col.FieldName
+		if col.fieldPath == nil {
+			col.fieldPath = strings.Split(fieldName, ".")
+		}
+		var fieldValue reflect.Value
+		fieldPathLen := len(col.fieldPath)
+		if fieldPathLen > 2 {
+			session.Engine.LogError("Unsupported mutliderive", fieldName)
+			return nil
+		} else if fieldPathLen == 2 {
+			parentField := dataStruct.FieldByName(col.fieldPath[0])
+			if parentField.IsValid() {
+				fieldValue = parentField.FieldByName(col.fieldPath[1])
+			}
+		} else {
+			fieldValue = dataStruct.FieldByName(fieldName)
@@ -1373,6 +1391,7 @@ func (session *Session) getField(dataStruct *reflect.Value, key string, table *Table) *reflect.Value {
 		return nil
 	}
 	return &fieldValue
+	}
 }
 
 func (session *Session) row2Bean(rows *sql.Rows, fields []string, fieldsCount int, bean interface{}) error {
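In getField the dotted field path is now computed lazily and cached on the column (col.fieldPath), so strings.Split runs once per column instead of on every row and field; commenting out strings.ToLower(key) and folding the body into the else branch of the column lookup are part of the same tidy-up. A small sketch of that memoization, assuming an illustrative column type and path method rather than xorm's own:

package main

import (
	"fmt"
	"strings"
)

// column is an illustrative stand-in for xorm's Column: fieldPath
// memoizes the result of strings.Split(FieldName, ".").
type column struct {
	FieldName string
	fieldPath []string
}

// path splits FieldName only once; later calls reuse the cached slice.
func (c *column) path() []string {
	if c.fieldPath == nil {
		c.fieldPath = strings.Split(c.FieldName, ".")
	}
	return c.fieldPath
}

func main() {
	c := &column{FieldName: "Parent.Id"}
	for i := 0; i < 3; i++ {
		fmt.Println(c.path()) // [Parent Id], split computed on the first iteration only
	}
}

The cached slice lives on the column, so later lookups of the same column reuse it; the trailing nil added in the first hunk appears to be this field's zero value.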
@@ -1395,7 +1414,6 @@ func (session *Session) row2Bean(rows *sql.Rows, fields []string, fieldsCount int, bean interface{}) error {
 
 	for ii, key := range fields {
 		if fieldValue := session.getField(&dataStruct, key, table); fieldValue != nil {
-
 			rawValue := reflect.Indirect(reflect.ValueOf(scanResultContainers[ii]))
 
 			//if row is null then ignore