/**
* Copyright [2012] [Datasalt Systems S.L.]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datasalt.pangool.serialization;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TIOStreamTransport;
/**
* A {@link Serialization} for types generated by
* <a href="http://incubator.apache.org/thrift/">Apache Thrift</a>.
* Thrift types all descend from {@link TBase}.
* <p>
* To use this serialization, make sure that the Hadoop property
* <code>io.serializations</code> includes the fully-qualified classname of this
* class: <code>com.datasalt.pangool.serialization.ThriftSerialization</code>.
*/
@SuppressWarnings("rawtypes")
public class ThriftSerialization implements Serialization<TBase> {
public boolean accept(Class<?> c) {
return TBase.class.isAssignableFrom(c);
}
public static class TSerializerAdapter implements Serializer<TBase> {
private TIOStreamTransport transport;
private TProtocol protocol;
public void open(OutputStream out) {
transport = new TIOStreamTransport(out);
protocol = new TBinaryProtocol(transport);
}
public void serialize(TBase t) throws IOException {
try {
t.write(protocol);
} catch (TException e) {
throw new IOException(e);
}
}
public void close() throws IOException {
if (transport != null) {
transport.close();
}
}
}
public org.apache.hadoop.io.serializer.Serializer<TBase> getSerializer(Class<TBase> c) {
return new TSerializerAdapter();
}
public static class TDeserializerAdapter implements Deserializer<TBase> {
private Class<? extends TBase> tClass;
private TIOStreamTransport transport;
private TProtocol protocol;
public TDeserializerAdapter(Class<? extends TBase> tClass) {
this.tClass = tClass;
}
public void open(InputStream in) {
transport = new TIOStreamTransport(in);
protocol = new TBinaryProtocol(transport);
}
public TBase deserialize(TBase t) throws IOException {
TBase object;
if(t == null) {
object = (TBase) ReflectionUtils.newInstance(tClass, null);
} else {
object = t;
object.clear();
}
try {
object.read(protocol);
} catch (TException e) {
throw new IOException(e.toString());
}
return object;
}
public void close() throws IOException {
if (transport != null) {
transport.close();
}
}
}
public org.apache.hadoop.io.serializer.Deserializer<TBase> getDeserializer(Class<TBase> c) {
return new TDeserializerAdapter(c);
}
/**
* Enables Thrift Serialization support in Hadoop.
*/
public static void enableThriftSerialization(Configuration conf) {
String ser = conf.get("io.serializations").trim();
if (ser.length() !=0 ) {
ser += ",";
}
//Adding the Thrift serialization
ser += ThriftSerialization.class.getName();
conf.set("io.serializations", ser);
}
}