This repository has been archived by the owner on Oct 8, 2019. It is now read-only.

Commit

Fixed a serious bug in "-loadmodel" (k was a shared object for each line)

myui committed Jul 3, 2014
1 parent 4013b1d commit e63a9fc
Showing 2 changed files with 11 additions and 6 deletions.
17 changes: 11 additions & 6 deletions src/main/hivemall/LearnerBaseUDTF.java
@@ -49,7 +49,7 @@
 
 public abstract class LearnerBaseUDTF extends UDTFWithOptions {
 
-    private static final Log logger = LogFactory.getLog("Hivemall");
+    private static final Log logger = LogFactory.getLog(LearnerBaseUDTF.class);
 
     protected boolean feature_hashing;
     protected float bias;
@@ -139,6 +139,8 @@ private static void loadPredictionModel(OpenHashMap<Object, WeightValue> map, Fi
         StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
         StructField keyRef = lineOI.getStructFieldRef("key");
         StructField valueRef = lineOI.getStructFieldRef("value");
+        PrimitiveObjectInspector keyRefOI = (PrimitiveObjectInspector) keyRef.getFieldObjectInspector();
+        FloatObjectInspector varRefOI = (FloatObjectInspector) valueRef.getFieldObjectInspector();
 
         final BufferedReader reader = HadoopUtils.getBufferedReader(file);
         try {
@@ -152,8 +154,8 @@ private static void loadPredictionModel(OpenHashMap<Object, WeightValue> map, Fi
                 if(f0 == null || f1 == null) {
                     continue; // avoid the case that key or value is null
                 }
-                Object k = ((PrimitiveObjectInspector) keyRef.getFieldObjectInspector()).getPrimitiveWritableObject(f0);
-                float v = ((FloatObjectInspector) valueRef.getFieldObjectInspector()).get(f1);
+                Object k = keyRefOI.getPrimitiveWritableObject(keyRefOI.copyObject(f0));
+                float v = varRefOI.get(f1);
                 map.put(k, new WeightValue(v));
             }
         } finally {
@@ -179,6 +181,9 @@ private static void loadPredictionModel(OpenHashMap<Object, WeightValue> map, Fi
         StructField c1ref = lineOI.getStructFieldRef("c1");
         StructField c2ref = lineOI.getStructFieldRef("c2");
         StructField c3ref = lineOI.getStructFieldRef("c3");
+        PrimitiveObjectInspector c1oi = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
+        FloatObjectInspector c2oi = (FloatObjectInspector) c2ref.getFieldObjectInspector();
+        FloatObjectInspector c3oi = (FloatObjectInspector) c3ref.getFieldObjectInspector();
 
         final BufferedReader reader = HadoopUtils.getBufferedReader(file);
         try {
@@ -193,9 +198,9 @@ private static void loadPredictionModel(OpenHashMap<Object, WeightValue> map, Fi
                 if(f0 == null || f1 == null || f2 == null) {
                     continue; // avoid unexpected case
                 }
-                Object k = ((PrimitiveObjectInspector) c1ref.getFieldObjectInspector()).getPrimitiveWritableObject(f0);
-                float v = ((FloatObjectInspector) c2ref.getFieldObjectInspector()).get(f1);
-                float cov = ((FloatObjectInspector) c3ref.getFieldObjectInspector()).get(f2);
+                Object k = c1oi.getPrimitiveWritableObject(c1oi.copyObject(f0));
+                float v = c2oi.get(f1);
+                float cov = c3oi.get(f2);
                 map.put(k, new WeightValueWithCovar(v, cov));
             }
         } finally {
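The following sketch (not part of the commit) illustrates the bug described in the commit message: when a SerDe or ObjectInspector recycles one mutable Writable across rows, putting that reference straight into the map leaves every entry keyed by the same object, so earlier keys are silently rewritten and lookups miss. Hadoop's org.apache.hadoop.io.Text stands in here for the recycled key object; the class name and feature strings are made up for illustration, and the defensive copy plays the role of keyRefOI.copyObject(f0) in the patched code. It assumes only hadoop-common on the classpath.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.io.Text;

// Illustrative only: mimics a SerDe that reuses its key Writable for every row.
public class SharedKeyPitfall {

    public static void main(String[] args) {
        final Text reused = new Text();                  // one buffer reused for each "row"
        final String[] rows = {"feat1", "feat2", "feat3"};

        // Buggy pattern: every map entry keys on the same mutable object.
        Map<Object, Float> buggy = new HashMap<Object, Float>();
        float w = 0.f;
        for (String row : rows) {
            reused.set(row);            // mutates the shared instance in place
            buggy.put(reused, w++);     // all entries alias `reused`
        }
        // Earlier keys were mutated after insertion, so this lookup misses.
        System.out.println("buggy: size=" + buggy.size()
                + ", get(feat1)=" + buggy.get(new Text("feat1")));

        // Fixed pattern (what the patch achieves via copyObject): copy the key
        // before inserting it, so each entry owns an independent key object.
        Map<Object, Float> fixed = new HashMap<Object, Float>();
        w = 0.f;
        for (String row : rows) {
            reused.set(row);
            Object k = new Text(reused);                 // defensive copy of the key
            fixed.put(k, w++);
        }
        System.out.println("fixed: size=" + fixed.size()
                + ", get(feat1)=" + fixed.get(new Text("feat1")));
    }
}

In the buggy run all three entries end up pointing at the same, last-written key, so get(new Text("feat1")) returns null; after the defensive copy each key keeps its own value, which is why the commit copies the key object before inserting it into the model map.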
Binary file modified target/hivemall.jar
