Gist created June 25, 2020 00:01 by randallwhitman.
Save randallwhitman/50eaccccb4b9d6c9aafbc8d394364cda to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
diff --git a/README.md b/README.md | |
index e0381ae..ad8b512 100644 | |
--- a/README.md | |
+++ b/README.md | |
@@ -71,7 +71,7 @@ Find a bug or want to request a new feature? Please let us know by submitting a | |
Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing) | |
## Licensing | |
-Copyright 2013-2019 Esri | |
+Copyright 2013-2020 Esri | |
Licensed under the Apache License, Version 2.0 (the "License"); | |
you may not use this file except in compliance with the License. | |
diff --git a/hive/src/main/java/com/esri/hadoop/hive/serde/BaseJsonSerDe.java b/hive/src/main/java/com/esri/hadoop/hive/serde/BaseJsonSerDe.java | |
index 8c03ae6..813a8c1 100644 | |
--- a/hive/src/main/java/com/esri/hadoop/hive/serde/BaseJsonSerDe.java | |
+++ b/hive/src/main/java/com/esri/hadoop/hive/serde/BaseJsonSerDe.java | |
@@ -16,9 +16,11 @@ import org.apache.hadoop.hive.serde2.SerDeException; | |
import org.apache.hadoop.hive.serde2.SerDeStats; | |
import org.apache.hadoop.hive.serde2.io.ByteWritable; | |
import org.apache.hadoop.hive.serde2.io.DateWritable; | |
+import org.apache.hadoop.hive.serde2.io.DateWritableV2; | |
import org.apache.hadoop.hive.serde2.io.DoubleWritable; | |
import org.apache.hadoop.hive.serde2.io.ShortWritable; | |
import org.apache.hadoop.hive.serde2.io.TimestampWritable; | |
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; | |
import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive; | |
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; | |
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; | |
@@ -147,7 +149,7 @@ abstract public class BaseJsonSerDe extends AbstractSerDe { | |
// null out array because we reuse it and we don't want values persisting | |
// from the last record | |
- for (int i=0;i<numColumns;i++) | |
+ for (int i=0; i<numColumns; i++) | |
row.set(i, null); | |
try { | |
@@ -321,7 +323,13 @@ abstract public class BaseJsonSerDe extends AbstractSerDe { | |
PrimitiveObjectInspector poi, JsonGenerator jsonGen) | |
throws JsonProcessingException, IOException { | |
Object prim = poi.getPrimitiveJavaObject(value); | |
- if (prim instanceof java.util.Date) { | |
+ if (prim instanceof org.apache.hadoop.hive.common.type.Timestamp) { | |
+ long epoch = ((org.apache.hadoop.hive.common.type.Timestamp)prim).toEpochMilli(); | |
+ jsonGen.writeObjectField(label, epoch); | |
+ } else if (prim instanceof org.apache.hadoop.hive.common.type.Date) { | |
+ long epoch = ((org.apache.hadoop.hive.common.type.Date)prim).toEpochMilli(); | |
+ jsonGen.writeObjectField(label, epoch); | |
+ } else if (prim instanceof java.util.Date) { | |
long epoch = ((java.util.Date)prim).getTime(); | |
long offset = prim instanceof java.sql.Timestamp ? 0 : tz.getOffset(epoch); | |
jsonGen.writeObjectField(label, epoch - offset); // UTC | |
@@ -336,7 +344,22 @@ abstract public class BaseJsonSerDe extends AbstractSerDe { | |
// Parse OGCGeometry from JSON | |
abstract protected OGCGeometry parseGeom(JsonParser parser); | |
- private java.sql.Date parseDate(JsonParser parser) throws JsonParseException, IOException { | |
+ private org.apache.hadoop.hive.common.type.Date parseHiveDate(JsonParser parser) | |
+ throws JsonParseException, IOException { | |
+ org.apache.hadoop.hive.common.type.Date hhd = new org.apache.hadoop.hive.common.type.Date(); | |
+ if (JsonToken.VALUE_NUMBER_INT.equals(parser.getCurrentToken())) { | |
+ long epoch = parser.getLongValue(); | |
+ hhd.setTimeInMillis(epoch); | |
+ } else try { | |
+ long epoch = parseTime(parser.getText(), "yyyy-MM-dd"); | |
+ hhd.setTimeInMillis(epoch); | |
+ } catch (java.text.ParseException e) { | |
+ // arbitrary default | |
+ } | |
+ return hhd; | |
+ } | |
+ | |
+ private java.sql.Date parseSqlDate(JsonParser parser) throws JsonParseException, IOException { | |
java.sql.Date jsd = null; | |
if (JsonToken.VALUE_NUMBER_INT.equals(parser.getCurrentToken())) { | |
long epoch = parser.getLongValue(); | |
@@ -350,7 +373,36 @@ abstract public class BaseJsonSerDe extends AbstractSerDe { | |
return jsd; | |
} | |
- private java.sql.Timestamp parseTime(JsonParser parser) throws JsonParseException, IOException { | |
+ private org.apache.hadoop.hive.common.type.Timestamp parseHiveTime(JsonParser parser) | |
+ throws JsonParseException, IOException { | |
+ org.apache.hadoop.hive.common.type.Timestamp hht = new org.apache.hadoop.hive.common.type.Timestamp(); | |
+ if (JsonToken.VALUE_NUMBER_INT.equals(parser.getCurrentToken())) { | |
+ long epoch = parser.getLongValue(); | |
+ hht.setTimeInMillis(epoch); | |
+ } else { | |
+ java.sql.Timestamp jst = null; | |
+ String value = parser.getText(); | |
+ int point = value.indexOf('.'); | |
+ if (point >= 0) { | |
+ jst = parseTime(value.substring(0,point+4)); // "yyyy-MM-dd HH:mm:ss.SSS" - truncate | |
+ } else { | |
+ jst = parseTime(value); // "yyyy-MM-dd HH:mm:ss.SSS" | |
+ String[] formats = {"yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm", "yyyy-MM-dd"}; | |
+ for (String format: formats) { | |
+ if (jst != null) break; | |
+ try { | |
+ jst = new java.sql.Timestamp(parseTime(value, format)); | |
+ } catch (java.text.ParseException e) { | |
+ // remain null | |
+ } | |
+ } | |
+ } | |
+ hht.setTimeInMillis(jst.getTime()); | |
+ } | |
+ return hht; | |
+ } | |
+ | |
+ private java.sql.Timestamp parseSqlTime(JsonParser parser) throws JsonParseException, IOException { | |
java.sql.Timestamp jst = null; | |
if (JsonToken.VALUE_NUMBER_INT.equals(parser.getCurrentToken())) { | |
long epoch = parser.getLongValue(); | |
@@ -389,6 +441,20 @@ abstract public class BaseJsonSerDe extends AbstractSerDe { | |
return new java.text.SimpleDateFormat(format).parse(value).getTime(); | |
} | |
+ private void setDateField(int fieldIndex, JsonParser parser) throws JsonParseException, IOException { | |
+ if (row.get(fieldIndex) instanceof DateWritable) | |
+ ((DateWritable)row.get(fieldIndex)).set(parseSqlDate(parser)); | |
+ else | |
+ ((DateWritableV2)row.get(fieldIndex)).set(parseHiveDate(parser)); | |
+ } | |
+ | |
+ private void setTimeField(int fieldIndex, JsonParser parser) throws JsonParseException, IOException { | |
+ if (row.get(fieldIndex) instanceof TimestampWritable) | |
+ ((TimestampWritable)row.get(fieldIndex)).set(parseSqlTime(parser)); | |
+ else | |
+ ((TimestampWritableV2)row.get(fieldIndex)).set(parseHiveTime(parser)); | |
+ } | |
+ | |
/** | |
* Copies the Writable at fieldIndex from rowBase to row, then sets the value of the Writable | |
* to the value in parser | |
@@ -429,11 +495,11 @@ abstract public class BaseJsonSerDe extends AbstractSerDe { | |
case BOOLEAN: | |
((BooleanWritable)row.get(fieldIndex)).set(parser.getBooleanValue()); | |
break; | |
- case DATE: // DateWritable stores days not milliseconds. | |
- ((DateWritable)row.get(fieldIndex)).set(parseDate(parser)); | |
+ case DATE: // DateWritable, like java.sql.Date, represents days not milliseconds. | |
+ setDateField(fieldIndex, parser); | |
break; | |
case TIMESTAMP: | |
- ((TimestampWritable)row.get(fieldIndex)).set(parseTime(parser)); | |
+ setTimeField(fieldIndex, parser); | |
break; | |
default: // STRING/unrecognized | |
((Text)row.get(fieldIndex)).set(parser.getText()); | |
diff --git a/hive/src/test/java/com/esri/hadoop/hive/serde/JsonSerDeTestingBase.java b/hive/src/test/java/com/esri/hadoop/hive/serde/JsonSerDeTestingBase.java | |
index 4e938f2..b4c27e5 100644 | |
--- a/hive/src/test/java/com/esri/hadoop/hive/serde/JsonSerDeTestingBase.java | |
+++ b/hive/src/test/java/com/esri/hadoop/hive/serde/JsonSerDeTestingBase.java | |
@@ -7,8 +7,10 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe; | |
import org.apache.hadoop.hive.serde2.io.ByteWritable; | |
import org.apache.hadoop.hive.serde2.io.DoubleWritable; | |
import org.apache.hadoop.hive.serde2.io.DateWritable; | |
+import org.apache.hadoop.hive.serde2.io.DateWritableV2; | |
import org.apache.hadoop.hive.serde2.io.ShortWritable; | |
import org.apache.hadoop.hive.serde2.io.TimestampWritable; | |
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; | |
import org.apache.hadoop.hive.serde2.objectinspector.StructField; | |
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; | |
import org.apache.hadoop.io.BooleanWritable; | |
@@ -56,10 +58,18 @@ public abstract class JsonSerDeTestingBase { | |
stuff.add(new DateWritable(item)); | |
} | |
+ protected void addWritable(ArrayList<Object> stuff, org.apache.hadoop.hive.common.type.Date item) { | |
+ stuff.add(new DateWritableV2(item)); | |
+ } | |
+ | |
protected void addWritable(ArrayList<Object> stuff, java.sql.Timestamp item) { | |
stuff.add(new TimestampWritable(item)); | |
} | |
+ protected void addWritable(ArrayList<Object> stuff, org.apache.hadoop.hive.common.type.Timestamp item) { | |
+ stuff.add(new TimestampWritableV2(item)); | |
+ } | |
+ | |
protected void addWritable(ArrayList<Object> stuff, Geometry geom) { | |
addWritable(stuff, geom, null); | |
} | |
diff --git a/hive/src/test/java/com/esri/hadoop/hive/serde/TestEsriJsonSerDe.java b/hive/src/test/java/com/esri/hadoop/hive/serde/TestEsriJsonSerDe.java | |
index 03b031f..2a46f88 100644 | |
--- a/hive/src/test/java/com/esri/hadoop/hive/serde/TestEsriJsonSerDe.java | |
+++ b/hive/src/test/java/com/esri/hadoop/hive/serde/TestEsriJsonSerDe.java | |
@@ -3,7 +3,6 @@ package com.esri.hadoop.hive.serde; | |
import org.junit.Assert; | |
import org.junit.Test; | |
-import java.text.SimpleDateFormat; | |
import java.util.ArrayList; | |
import java.util.Properties; | |
import java.util.TimeZone; | |
@@ -12,8 +11,10 @@ import org.apache.hadoop.conf.Configuration; | |
import org.apache.hadoop.hive.serde2.AbstractSerDe; | |
import org.apache.hadoop.hive.serde2.io.ByteWritable; | |
import org.apache.hadoop.hive.serde2.io.DateWritable; | |
+import org.apache.hadoop.hive.serde2.io.DateWritableV2; | |
import org.apache.hadoop.hive.serde2.io.ShortWritable; | |
import org.apache.hadoop.hive.serde2.io.TimestampWritable; | |
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2; | |
import org.apache.hadoop.hive.serde2.objectinspector.StructField; | |
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; | |
import org.apache.hadoop.hive.serde2.lazy.LazyString; | |
@@ -73,19 +74,24 @@ public class TestEsriJsonSerDe extends JsonSerDeTestingBase { | |
// {"attributes":{"when":147147147147}} | |
long epoch = 147147147147L; | |
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MMM-dd"); | |
- sdf.setTimeZone(TimeZone.getTimeZone("America/New_York")); | |
- java.sql.Date expected = new java.sql.Date(epoch); | |
- String expString = sdf.format(expected); | |
- //System.err.println(expected.getTime()); | |
- addWritable(stuff, expected); | |
- Writable jsw = jserde.serialize(stuff, rowOI); | |
+ long zoned = epoch - TimeZone.getDefault().getOffset(epoch); | |
+ Writable jsw; | |
+ try { | |
+ org.apache.hadoop.hive.common.type.Date hhd = new org.apache.hadoop.hive.common.type.Date(); | |
+ hhd.setTimeInMillis(zoned); | |
+ addWritable(stuff, hhd); | |
+ jsw = jserde.serialize(stuff, rowOI); | |
+ } catch (Exception exc) { | |
+ java.sql.Date expected = new java.sql.Date(zoned); | |
+ addWritable(stuff, expected); | |
+ jsw = jserde.serialize(stuff, rowOI); | |
+ } | |
JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString()); | |
jn = jn.findValue("attributes"); | |
jn = jn.findValue("when"); | |
java.sql.Date actual = new java.sql.Date(jn.getLongValue()); | |
- String actualDateString = sdf.format(actual); | |
- Assert.assertEquals(expString, actualDateString); // workaround DateWritable,j.s.Date | |
+ long day = 24*3600*1000; // DateWritable represents days not milliseconds. | |
+ Assert.assertEquals(epoch/day, jn.getLongValue()/day); | |
} | |
@Test | |
public void TestTimeWrite() throws Exception { | |
@@ -98,14 +104,24 @@ public class TestEsriJsonSerDe extends JsonSerDeTestingBase { | |
// {"attributes":{"when":147147147147}} | |
long epoch = 147147147147L; | |
- java.sql.Timestamp expected = new java.sql.Timestamp(epoch); | |
- addWritable(stuff, expected); | |
- Writable jsw = jserde.serialize(stuff, rowOI); | |
+ long zoned = epoch - TimeZone.getDefault().getOffset(epoch); | |
+ Writable jsw; | |
+ try { | |
+ org.apache.hadoop.hive.common.type.Timestamp hht = new org.apache.hadoop.hive.common.type.Timestamp(); | |
+ hht.setTimeInMillis(zoned); | |
+ addWritable(stuff, hht); | |
+ jsw = jserde.serialize(stuff, rowOI); | |
+ } catch (Exception exc) { | |
+ exc.printStackTrace(); | |
+ java.sql.Date expected = new java.sql.Date(zoned); | |
+ addWritable(stuff, expected); | |
+ jsw = jserde.serialize(stuff, rowOI); | |
+ } | |
JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString()); | |
jn = jn.findValue("attributes"); | |
jn = jn.findValue("when"); | |
java.sql.Timestamp actual = new java.sql.Timestamp(jn.getLongValue()); | |
- Assert.assertEquals(expected, actual); | |
+ Assert.assertEquals(epoch, actual.getTime()); | |
} | |
@Test | |
@@ -168,13 +184,17 @@ public class TestEsriJsonSerDe extends JsonSerDeTestingBase { | |
Object row = jserde.deserialize(value); | |
StructField f0 = rowOI.getStructFieldRef("when"); | |
Object fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals("2020-02-20", | |
- ((DateWritable)fieldData).get().toString()); | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals("2020-02-20", ((DateWritable)fieldData).get().toString()); | |
+ else | |
+ Assert.assertEquals("2020-02-20", ((DateWritableV2)fieldData).get().toString()); | |
value.set("{\"attributes\":{\"when\":\"2017-05-05\"}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals("2017-05-05", | |
- ((DateWritable)fieldData).get().toString()); | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals("2017-05-05", ((DateWritable)fieldData).get().toString()); | |
+ else | |
+ Assert.assertEquals("2017-05-05", ((DateWritableV2)fieldData).get().toString()); | |
} | |
@Test | |
@@ -193,15 +213,21 @@ public class TestEsriJsonSerDe extends JsonSerDeTestingBase { | |
Object row = jserde.deserialize(value); | |
StructField f0 = rowOI.getStructFieldRef("when"); | |
Object fieldData = rowOI.getStructFieldData(row, f0); | |
- //Assert.assertEquals(147147147147L, ((DateWritable)fieldData).get().getTime()); | |
- Assert.assertEquals(new java.sql.Date(147147147147L).toString(), | |
- ((DateWritable)fieldData).get().toString()); | |
+ long day = 24*3600*1000; // DateWritable represents days not milliseconds. | |
+ long epoch = 147147147147L; | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals(epoch/day, ((DateWritable)fieldData).get().getTime()/day); | |
+ else | |
+ Assert.assertEquals(epoch/day, ((DateWritableV2)fieldData).get().toEpochMilli()/day); | |
+ System.err.println("{\"attributes\":{\"when\":142857142857}}"); | |
value.set("{\"attributes\":{\"when\":142857142857}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- //Assert.assertEquals(142857142857L, ((DateWritable)fieldData).get()); | |
- Assert.assertEquals(new java.sql.Date(142857142857L).toString(), | |
- ((DateWritable)fieldData).get().toString()); | |
+ epoch = 142857142857L; | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals(epoch/day, ((DateWritable)fieldData).get().getTime()/day); | |
+ else | |
+ Assert.assertEquals(epoch/day, ((DateWritableV2)fieldData).get().toEpochMilli()/day); | |
} | |
@Test | |
@@ -216,31 +242,43 @@ public class TestEsriJsonSerDe extends JsonSerDeTestingBase { | |
jserde.initialize(config, proptab); | |
StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector(); | |
- value.set("{\"attributes\":{\"when\":\"2020-02-20\"}}"); | |
+ String timeStr = "2020-02-20"; | |
+ value.set("{\"attributes\":{\"when\":\"" + timeStr + "\"}}"); | |
Object row = jserde.deserialize(value); | |
StructField f0 = rowOI.getStructFieldRef("when"); | |
Object fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals( | |
- new java.text.SimpleDateFormat("yyyy-MM-dd").parse("2020-02-20").getTime(), | |
- ((TimestampWritable)fieldData).getTimestamp().getTime()); | |
- value.set("{\"attributes\":{\"when\":\"2017-05-05 05:05\"}}"); | |
+ long epoch = new java.text.SimpleDateFormat("yyyy-MM-dd").parse(timeStr).getTime(); | |
+ long got = (fieldData instanceof TimestampWritable) ? | |
+ ((TimestampWritable)fieldData).getTimestamp().getTime() : | |
+ ((TimestampWritableV2)fieldData).getSeconds(); | |
+ Assert.assertEquals(epoch/1000, got); | |
+ timeStr = "2017-05-05 05:05"; | |
+ value.set("{\"attributes\":{\"when\":\"" + timeStr + "\"}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals( | |
- new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").parse("2017-05-05 05:05").getTime(), | |
- ((TimestampWritable)fieldData).getTimestamp().getTime()); | |
- value.set("{\"attributes\":{\"when\":\"2017-08-09 10:11:12\"}}"); | |
+ epoch = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").parse(timeStr).getTime(); | |
+ got = (fieldData instanceof TimestampWritable) ? | |
+ ((TimestampWritable)fieldData).getTimestamp().getTime() : | |
+ ((TimestampWritableV2)fieldData).getSeconds(); | |
+ Assert.assertEquals(epoch/1000, got); | |
+ timeStr = "2017-08-09 10:11:12"; | |
+ value.set("{\"attributes\":{\"when\":\"" + timeStr + "\"}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals( | |
- new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2017-08-09 10:11:12").getTime(), | |
- ((TimestampWritable)fieldData).getTimestamp().getTime()); | |
- value.set("{\"attributes\":{\"when\":\"2017-06-05 04:03:02.123456789\"}}"); | |
+ epoch = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(timeStr).getTime(); | |
+ got = (fieldData instanceof TimestampWritable) ? | |
+ ((TimestampWritable)fieldData).getTimestamp().getTime() : | |
+ ((TimestampWritableV2)fieldData).getSeconds(); | |
+ Assert.assertEquals(epoch/1000, got); | |
+ timeStr = "2017-06-05 04:03:02.123456789"; | |
+ value.set("{\"attributes\":{\"when\":\"" + timeStr + "\"}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals( | |
- new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse("2017-06-05 04:03:02.123").getTime(), | |
- ((TimestampWritable)fieldData).getTimestamp().getTime()); // ns parsed but not checked | |
+ epoch = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(timeStr).getTime(); | |
+ got = (fieldData instanceof TimestampWritable) ? | |
+ ((TimestampWritable)fieldData).getTimestamp().getTime() : | |
+ ((TimestampWritableV2)fieldData).getSeconds(); | |
+ Assert.assertEquals(epoch/1000, got); // ns parsed but not checked | |
} | |
@Test | |
@@ -427,37 +465,6 @@ public class TestEsriJsonSerDe extends JsonSerDeTestingBase { | |
fieldData = getField("text", row, rowOI); | |
Assert.assertEquals("other", ((Text)fieldData).toString()); | |
} | |
- /* * | |
- @Deprecated -> Obsolete | |
- @Test | |
- public void LegacyName() throws Exception { | |
- ArrayList<Object> stuff = new ArrayList<Object>(); | |
- Properties proptab = new Properties(); | |
- proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num,shape"); | |
- proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "bigint,binary"); | |
- Configuration config = new Configuration(); | |
- AbstractSerDe jserde = new JsonSerde(); | |
- jserde.initialize(config, proptab); | |
- StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector(); | |
- | |
- //value.set("{\"attributes\":{\"num\":7},\"geometry\":{\"x\":15.0,\"y\":5.0}}"); | |
- addWritable(stuff, 7L); | |
- addWritable(stuff, new Point(15.0, 5.0)); | |
- Object row = runSerDe(stuff, jserde, rowOI); | |
- Object fieldData = getField("num", row, rowOI); | |
- Assert.assertEquals(7, ((LongWritable)fieldData).get()); | |
- | |
- //value.set("{\"attributes\":{\"num\":4},\"geometry\":{\"x\":7.0,\"y\":2.0}}"); | |
- stuff.clear(); | |
- addWritable(stuff, 4L); | |
- addWritable(stuff, new Point(7.0, 2.0)); | |
- row = runSerDe(stuff, jserde, rowOI); | |
- fieldData = getField("num", row, rowOI); | |
- Assert.assertEquals(4, ((LongWritable)fieldData).get()); | |
- fieldData = getField("shape", row, rowOI); | |
- ckPoint(new Point(7.0, 2.0), (BytesWritable)fieldData); | |
- } | |
- * */ | |
private AbstractSerDe mkSerDe(Properties proptab) throws Exception { | |
Configuration config = new Configuration(); | |
diff --git a/hive/src/test/java/com/esri/hadoop/hive/serde/TestGeoJsonSerDe.java b/hive/src/test/java/com/esri/hadoop/hive/serde/TestGeoJsonSerDe.java | |
index e3a5a7d..ccd7683 100644 | |
--- a/hive/src/test/java/com/esri/hadoop/hive/serde/TestGeoJsonSerDe.java | |
+++ b/hive/src/test/java/com/esri/hadoop/hive/serde/TestGeoJsonSerDe.java | |
@@ -9,6 +9,7 @@ import java.util.TimeZone; | |
import org.apache.hadoop.conf.Configuration; | |
import org.apache.hadoop.hive.serde2.AbstractSerDe; | |
import org.apache.hadoop.hive.serde2.io.DateWritable; | |
+import org.apache.hadoop.hive.serde2.io.DateWritableV2; | |
import org.apache.hadoop.hive.serde2.objectinspector.StructField; | |
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; | |
import org.apache.hadoop.io.BytesWritable; | |
@@ -62,10 +63,19 @@ public class TestGeoJsonSerDe extends JsonSerDeTestingBase { | |
StructObjectInspector rowOI = (StructObjectInspector)jserde.getObjectInspector(); | |
// {"properties":{"when":147147147147}} | |
- long epoch = 0L; // 147147147147L; | |
- java.sql.Date expected = new java.sql.Date(epoch - TimeZone.getDefault().getOffset(epoch)); | |
- addWritable(stuff, expected); | |
- Writable jsw = jserde.serialize(stuff, rowOI); | |
+ long epoch = 147147147147L; | |
+ long zoned = epoch - TimeZone.getDefault().getOffset(epoch); | |
+ Writable jsw; | |
+ try { | |
+ org.apache.hadoop.hive.common.type.Date hhd = new org.apache.hadoop.hive.common.type.Date(); | |
+ hhd.setTimeInMillis(zoned); | |
+ addWritable(stuff, hhd); | |
+ jsw = jserde.serialize(stuff, rowOI); | |
+ } catch (Exception exc) { | |
+ java.sql.Date expected = new java.sql.Date(zoned); | |
+ addWritable(stuff, expected); | |
+ jsw = jserde.serialize(stuff, rowOI); | |
+ } | |
JsonNode jn = new ObjectMapper().readTree(((Text)jsw).toString()); | |
jn = jn.findValue("properties"); | |
jn = jn.findValue("when"); | |
@@ -133,13 +143,21 @@ public class TestGeoJsonSerDe extends JsonSerDeTestingBase { | |
Object row = jserde.deserialize(value); | |
StructField f0 = rowOI.getStructFieldRef("when"); | |
Object fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals("2020-02-20", | |
- ((DateWritable)fieldData).get().toString()); | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals("2020-02-20", | |
+ ((DateWritable)fieldData).get().toString()); | |
+ else | |
+ Assert.assertEquals("2020-02-20", | |
+ ((DateWritableV2)fieldData).get().toString()); | |
value.set("{\"properties\":{\"when\":\"2017-05-05\"}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- Assert.assertEquals("2017-05-05", | |
- ((DateWritable)fieldData).get().toString()); | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals("2017-05-05", | |
+ ((DateWritable)fieldData).get().toString()); | |
+ else | |
+ Assert.assertEquals("2017-05-05", | |
+ ((DateWritableV2)fieldData).get().toString()); | |
} | |
@Test | |
@@ -158,15 +176,20 @@ public class TestGeoJsonSerDe extends JsonSerDeTestingBase { | |
Object row = jserde.deserialize(value); | |
StructField f0 = rowOI.getStructFieldRef("when"); | |
Object fieldData = rowOI.getStructFieldData(row, f0); | |
- //Assert.assertEquals(147147147147L, ((DateWritable)fieldData).get().getTime()); | |
- Assert.assertEquals(new java.sql.Date(147147147147L).toString(), | |
- ((DateWritable)fieldData).get().toString()); | |
+ long day = 24*3600*1000; // DateWritable represents days not milliseconds. | |
+ long epoch = 147147147147L; | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals(epoch/day, ((DateWritable)fieldData).get().getTime()/day); | |
+ else | |
+ Assert.assertEquals(epoch/day, ((DateWritableV2)fieldData).get().toEpochMilli()/day); | |
value.set("{\"properties\":{\"when\":142857142857}}"); | |
row = jserde.deserialize(value); | |
fieldData = rowOI.getStructFieldData(row, f0); | |
- //Assert.assertEquals(142857142857L, ((DateWritable)fieldData).get()); | |
- Assert.assertEquals(new java.sql.Date(142857142857L).toString(), | |
- ((DateWritable)fieldData).get().toString()); | |
+ epoch = 142857142857L; | |
+ if (fieldData instanceof DateWritable) | |
+ Assert.assertEquals(epoch/day, ((DateWritable)fieldData).get().getTime()/day); | |
+ else | |
+ Assert.assertEquals(epoch/day, ((DateWritableV2)fieldData).get().toEpochMilli()/day); | |
} | |
@Test | |
diff --git a/pom.xml b/pom.xml | |
index 727972a..428e203 100755 | |
--- a/pom.xml | |
+++ b/pom.xml | |
@@ -107,19 +107,62 @@ | |
</properties> | |
</profile> | |
+ <!-- Jackson versions for building - generally deployment should use newer patch level. --> | |
<profile> | |
<id>jackson-2.6</id> | |
<properties> | |
<jackson.version>2.6.7.1</jackson.version> | |
</properties> | |
</profile> | |
- | |
<profile> | |
<id>jackson-2.8</id> | |
<properties> | |
<jackson.version>2.8.11.2</jackson.version> | |
</properties> | |
</profile> | |
+ <profile> | |
+ <id>jackson-2.9</id> | |
+ <properties> | |
+ <jackson.version>2.9.9</jackson.version> | |
+ </properties> | |
+ </profile> | |
+ <profile> | |
+ <id>jackson-2.10</id> | |
+ <properties> | |
+ <jackson.version>2.10.4</jackson.version> | |
+ </properties> | |
+ </profile> | |
+ <profile> | |
+ <id>jackson-2.11</id> | |
+ <properties> | |
+ <jackson.version>2.11.0</jackson.version> | |
+ </properties> | |
+ </profile> | |
+ | |
+ <profile> | |
+ <id>geometry-2.2.0</id> | |
+ <properties> | |
+ <geometry.version>2.2.0</geometry.version> | |
+ </properties> | |
+ </profile> | |
+ <profile> | |
+ <id>geometry-2.2.1</id> | |
+ <properties> | |
+ <geometry.version>2.2.1</geometry.version> | |
+ </properties> | |
+ </profile> | |
+ <profile> | |
+ <id>geometry-2.2.2</id> | |
+ <properties> | |
+ <geometry.version>2.2.2</geometry.version> | |
+ </properties> | |
+ </profile> | |
+ <profile> | |
+ <id>geometry-2.2.3</id> | |
+ <properties> | |
+ <geometry.version>2.2.3</geometry.version> | |
+ </properties> | |
+ </profile> | |
<profile> | |
<id>hadoop-1.1</id> | |
@@ -182,6 +225,12 @@ | |
</properties> | |
</profile> | |
<profile> | |
+ <id>hadoop-2.10</id> | |
+ <properties> | |
+ <hadoop.version>2.10.0</hadoop.version> | |
+ </properties> | |
+ </profile> | |
+ <profile> | |
<id>hadoop-3.0</id> | |
<properties> | |
<hadoop.version>3.0.3</hadoop.version> | |
@@ -190,13 +239,13 @@ | |
<profile> | |
<id>hadoop-3.1</id> | |
<properties> | |
- <hadoop.version>3.1.2</hadoop.version> | |
+ <hadoop.version>3.1.3</hadoop.version> | |
</properties> | |
</profile> | |
<profile> | |
<id>hadoop-3.2</id> | |
<properties> | |
- <hadoop.version>3.2.0</hadoop.version> | |
+ <hadoop.version>3.2.1</hadoop.version> | |
</properties> | |
</profile> | |
@@ -301,7 +350,7 @@ | |
<profile> | |
<id>hive-2.3</id> | |
<properties> | |
- <hive.version>2.3.5</hive.version> | |
+ <hive.version>2.3.7</hive.version> | |
</properties> | |
</profile> | |
<profile> | |
@@ -313,7 +362,7 @@ | |
<profile> | |
<id>hive-3.1</id> | |
<properties> | |
- <hive.version>3.1.1</hive.version> | |
+ <hive.version>3.1.2</hive.version> | |
</properties> | |
</profile> | |
@@ -353,11 +402,11 @@ | |
<java.target.version>1.6</java.target.version> | |
<!-- Versions for dependencies --> | |
- <hadoop.version>2.2.0</hadoop.version> | |
- <hive.version>0.12.0</hive.version> | |
- <jackson.version>2.9.6</jackson.version> | |
+ <hadoop.version>3.2.0</hadoop.version> | |
+ <hive.version>3.1.2</hive.version> | |
+ <jackson.version>2.9.9</jackson.version> | |
<logging.version>1.1.3</logging.version> | |
- <geometry.version>2.2.0</geometry.version> | |
+ <geometry.version>2.2.3</geometry.version> | |
<junit.version>4.11</junit.version> | |
<!-- Versions for plugins --> |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.