File |
Project |
Line |
org/apache/flume/sink/hbase/HBaseSink.java |
Flume NG HBase Sink |
468 |
org/apache/flume/sink/hbase2/HBase2Sink.java |
Flume NG HBase2 Sink |
444 |
}
/**
 * Perform "compression" on the given set of increments so that Flume sends
 * the minimum possible number of RPC operations to HBase per batch.
 *
 * @param incs Input: Increment objects to coalesce.
 * @return List of new Increment objects after coalescing the unique counts.
 */
private List<Increment> coalesceIncrements(Iterable<Increment> incs) {
  Preconditions.checkNotNull(incs, "List of Increments must not be null");
  // Phase 1: fold every increment into one nested counter structure,
  // keyed as row -> family -> qualifier -> accumulated count.
  Map<byte[], Map<byte[], NavigableMap<byte[], Long>>> counters =
      Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
  for (Increment inc : incs) {
    byte[] row = inc.getRow();
    for (Map.Entry<byte[], NavigableMap<byte[], Long>> fam : getFamilyMap(inc).entrySet()) {
      for (Map.Entry<byte[], Long> qual : fam.getValue().entrySet()) {
        incrementCounter(counters, row, fam.getKey(), qual.getKey(), qual.getValue());
      }
    }
  }
  // Phase 2: emit exactly one Increment per unique row, carrying the
  // aggregated count for each of its family/qualifier pairs.
  List<Increment> coalesced = Lists.newLinkedList();
  for (Map.Entry<byte[], Map<byte[], NavigableMap<byte[], Long>>> rowEntry :
      counters.entrySet()) {
    Increment inc = new Increment(rowEntry.getKey());
    for (Map.Entry<byte[], NavigableMap<byte[], Long>> fam : rowEntry.getValue().entrySet()) {
      for (Map.Entry<byte[], Long> qual : fam.getValue().entrySet()) {
        inc.addColumn(fam.getKey(), qual.getKey(), qual.getValue());
      }
    }
    coalesced.add(inc);
  }
  return coalesced;
}
/**
* Helper function for {@link #coalesceIncrements} to increment a counter
* value in the passed data structure.
*
* @param counters Nested data structure containing the counters.
* @param row Row key to increment.
* @param family Column family to increment.
* @param qualifier Column qualifier to increment.
* @param count Amount to increment by.
*/
private void incrementCounter(
Map<byte[], Map<byte[], NavigableMap<byte[], Long>>> counters,
byte[] row, byte[] family, byte[] qualifier, Long count) {
Map<byte[], NavigableMap<byte[], Long>> families = counters.get(row); |
File |
Project |
Line |
org/apache/flume/sink/hbase/RegexHbaseEventSerializer.java |
Flume NG HBase Sink |
99 |
org/apache/flume/sink/hbase2/RegexHBase2EventSerializer.java |
Flume NG HBase2 Sink |
98 |
regexIgnoreCase = context.getBoolean(IGNORE_CASE_CONFIG,
IGNORE_CASE_DEFAULT);
depositHeaders = context.getBoolean(DEPOSIT_HEADERS_CONFIG,
DEPOSIT_HEADERS_DEFAULT);
inputPattern = Pattern.compile(regex, Pattern.DOTALL
+ (regexIgnoreCase ? Pattern.CASE_INSENSITIVE : 0));
charset = Charset.forName(context.getString(CHARSET_CONFIG,
CHARSET_DEFAULT));
String colNameStr = context.getString(COL_NAME_CONFIG, COLUMN_NAME_DEFAULT);
String[] columnNames = colNameStr.split(",");
for (String s : columnNames) {
colNames.add(s.getBytes(charset));
}
//Rowkey is optional, default is -1
rowKeyIndex = context.getInteger(ROW_KEY_INDEX_CONFIG, -1);
//if row key is being used, make sure it is specified correct
if (rowKeyIndex >= 0) {
if (rowKeyIndex >= columnNames.length) {
throw new IllegalArgumentException(ROW_KEY_INDEX_CONFIG + " must be " +
"less than num columns " + columnNames.length);
}
if (!ROW_KEY_NAME.equalsIgnoreCase(columnNames[rowKeyIndex])) {
throw new IllegalArgumentException("Column at " + rowKeyIndex + " must be "
+ ROW_KEY_NAME + " and is " + columnNames[rowKeyIndex]);
}
}
}
@Override
public void configure(ComponentConfiguration conf) {
// Intentionally a no-op: this serializer is configured through the
// Context-based configure overload; the component-configuration path is unused.
}
@Override
public void initialize(Event event, byte[] columnFamily) {
// Capture the event's headers, raw body bytes, and the target column family
// so that the subsequent action-building calls can use them.
this.headers = event.getHeaders();
this.payload = event.getBody();
this.cf = columnFamily;
}
/**
 * Returns a row-key with the following format:
 * [time in millis]-[random key]-[nonce]
 */
protected byte[] getRowKey(Calendar cal) {
  /* NOTE on uniqueness — this key strategy guarantees:
   *
   * 1) No duplicate row keys within a single JVM (the nonce is monotonic).
   * 2) No duplicates between two JVMs running at different times
   *    (the millisecond timestamp differs).
   * 3) Between two concurrently-running JVMs the collision odds are
   *    non-zero but infinitesimal: timestamp, nonce, AND the random
   *    string would all have to collide at once. The random string
   *    matters because timestamp+nonce alone could collide if a fleet
   *    of Flume agents is restarted in tandem.
   *
   * Uniqueness is important because conflicting row keys cause data loss. */
  String rowKey = cal.getTimeInMillis() + "-" + randomKey + "-" + nonce.getAndIncrement();
  return rowKey.getBytes(charset);
}
protected byte[] getRowKey() {
// Convenience overload: stamps the key with the current wall-clock time.
return getRowKey(Calendar.getInstance());
}
@Override
public List<Row> getActions() throws FlumeException {
List<Row> actions = Lists.newArrayList();
byte[] rowKey;
Matcher m = inputPattern.matcher(new String(payload, charset));
if (!m.matches()) {
return Lists.newArrayList();
}
if (m.groupCount() != colNames.size()) {
return Lists.newArrayList();
}
try {
if (rowKeyIndex < 0) {
rowKey = getRowKey();
} else {
rowKey = m.group(rowKeyIndex + 1).getBytes(Charsets.UTF_8);
}
Put put = new Put(rowKey);
for (int i = 0; i < colNames.size(); i++) {
if (i != rowKeyIndex) {
put.add(cf, colNames.get(i), m.group(i + 1).getBytes(Charsets.UTF_8)); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
776 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1568 |
schemes.put(TupleScheme.class, new append_resultTupleSchemeFactory());
}
/**
*
* @see Status
*/
public Status success; // required
// NOTE(review): Thrift-generated metadata enum (appears to belong to the
// append_result struct) — regenerate from the IDL rather than hand-editing.
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
/**
*
* @see Status
*/
SUCCESS((short)0, "success");
// Name -> constant lookup index, built once at class-load time.
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it is not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it is not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
// Wire field id and field name as declared in the Thrift IDL.
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, Status.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(append_result.class, metaDataMap); |
File |
Project |
Line |
org/apache/flume/channel/file/ReplayHandler.java |
Flume NG file-based channel |
152 |
org/apache/flume/channel/file/ReplayHandler.java |
Flume NG file-based channel |
301 |
if (record.getLogWriteOrderID() > lastCheckpoint) {
if (type == TransactionEventRecord.Type.PUT.get()) {
putCount++;
ptr = new FlumeEventPointer(fileId, offset);
transactionMap.put(trans, ptr);
} else if (type == TransactionEventRecord.Type.TAKE.get()) {
takeCount++;
Take take = (Take) record;
ptr = new FlumeEventPointer(take.getFileID(), take.getOffset());
transactionMap.put(trans, ptr);
} else if (type == TransactionEventRecord.Type.ROLLBACK.get()) {
rollbackCount++;
transactionMap.remove(trans);
} else if (type == TransactionEventRecord.Type.COMMIT.get()) {
commitCount++;
@SuppressWarnings("unchecked")
Collection<FlumeEventPointer> pointers =
(Collection<FlumeEventPointer>) transactionMap.remove(trans);
if (((Commit) record).getType() == TransactionEventRecord.Type.TAKE.get()) {
if (inflightTakes.containsKey(trans)) {
if (pointers == null) {
pointers = Sets.newHashSet();
}
Set<Long> takes = inflightTakes.removeAll(trans);
Iterator<Long> it = takes.iterator();
while (it.hasNext()) {
Long take = it.next();
pointers.add(FlumeEventPointer.fromLong(take));
}
}
}
if (pointers != null && pointers.size() > 0) {
processCommit(((Commit) record).getType(), pointers);
count += pointers.size();
}
} else {
Preconditions.checkArgument(false,
"Unknown record type: " + Integer.toHexString(type));
}
} else {
skipCount++;
}
}
LOG.info("Replayed " + count + " from " + log); |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
735 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
989 |
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new close_argsTupleSchemeFactory();
// NOTE(review): Thrift-generated metadata enum for a struct with no fields
// (close_args) — the bare ';' below declares an empty set of constants.
// Regenerate from the IDL rather than hand-editing.
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
;
// Name -> constant lookup index; empty here, but kept for API uniformity.
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it is not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it is not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
// Wire field id and field name as declared in the Thrift IDL.
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(close_args.class, metaDataMap); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
783 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1575 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
694 |
public Status success; // required
// NOTE(review): Thrift-generated metadata enum, duplicate of the append_result
// _Fields enum above — a consequence of per-service code generation.
// Regenerate from the IDL rather than hand-editing.
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
/**
*
* @see Status
*/
SUCCESS((short)0, "success");
// Name -> constant lookup index, built once at class-load time.
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it is not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it is not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
// Wire field id and field name as declared in the Thrift IDL.
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, Status.class))); |
File |
Project |
Line |
org/apache/flume/auth/KerberosUser.java |
Flume Auth |
22 |
org/apache/flume/sink/hdfs/KerberosUser.java |
Flume NG HDFS Sink |
22 |
public class KerberosUser {
private final String principal;
private final String keyTab;
public KerberosUser(String principal, String keyTab) {
this.principal = principal;
this.keyTab = keyTab;
}
public String getPrincipal() {
return principal;
}
public String getKeyTab() {
return keyTab;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final KerberosUser other = (KerberosUser) obj;
if ((this.principal == null) ?
(other.principal != null) :
!this.principal.equals(other.principal)) {
return false;
}
if ((this.keyTab == null) ? (other.keyTab != null) : !this.keyTab.equals(other.keyTab)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int hash = 7;
hash = 41 * hash + (this.principal != null ? this.principal.hashCode() : 0);
hash = 41 * hash + (this.keyTab != null ? this.keyTab.hashCode() : 0);
return hash;
}
@Override
public String toString() {
return "{ principal: " + principal + ", keytab: " + keyTab + " }";
}
} |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
990 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1782 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
901 |
/**
 * Orders append_result instances first by field presence, then by field value,
 * matching the standard Thrift-generated comparison contract.
 *
 * @param other the instance to compare against (never null per Comparable contract).
 * @return negative, zero, or positive per {@link Comparable#compareTo}.
 */
public int compareTo(append_result other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  // Boolean.compare avoids the boxing done by Boolean.valueOf(..).compareTo(..).
  int lastComparison = Boolean.compare(isSetSuccess(), other.isSetSuccess());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (isSetSuccess()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}
// Thrift framework hook: resolve a wire field id to its _Fields constant.
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Deserialize this struct, delegating to the scheme registered for the protocol.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
// Serialize this struct, delegating to the scheme registered for the protocol.
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("append_result("); |
File |
Project |
Line |
org/apache/flume/sink/hbase/HBaseSink.java |
Flume NG HBase Sink |
261 |
org/apache/flume/sink/hbase2/HBase2Sink.java |
Flume NG HBase2 Sink |
276 |
.ZK_QUORUM);
Integer port = null;
/**
* HBase allows multiple nodes in the quorum, but all need to use the
* same client port. So get the nodes in host:port format,
* and ignore the ports for all nodes except the first one. If no port is
* specified, use default.
*/
if (zkQuorum != null && !zkQuorum.isEmpty()) {
StringBuilder zkBuilder = new StringBuilder();
logger.info("Using ZK Quorum: " + zkQuorum);
String[] zkHosts = zkQuorum.split(",");
int length = zkHosts.length;
for (int i = 0; i < length; i++) {
String[] zkHostAndPort = zkHosts[i].split(":");
zkBuilder.append(zkHostAndPort[0].trim());
if (i != length - 1) {
zkBuilder.append(",");
} else {
zkQuorum = zkBuilder.toString();
}
if (zkHostAndPort[1] == null) {
throw new FlumeException("Expected client port for the ZK node!");
}
if (port == null) {
port = Integer.parseInt(zkHostAndPort[1].trim());
} else if (!port.equals(Integer.parseInt(zkHostAndPort[1].trim()))) {
throw new FlumeException("All Zookeeper nodes in the quorum must " +
"use the same client port.");
}
}
if (port == null) {
port = HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT;
}
this.config.set(HConstants.ZOOKEEPER_QUORUM, zkQuorum);
this.config.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, port);
}
String hbaseZnode = context.getString( |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1024 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1816 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
935 |
StringBuilder sb = new StringBuilder("append_result(");
boolean first = true;
sb.append("success:");
if (this.success == null) {
sb.append("null");
} else {
sb.append(this.success);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
// (nothing to validate for this struct; method kept for the Thrift contract)
}
// Java serialization hook: encode the struct using the compact Thrift protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
// Java serialization hook: decode the struct using the compact Thrift protocol.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class append_resultStandardSchemeFactory implements SchemeFactory { |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5044 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5757 |
// Protobuf-generated parsing constructor: builds a Rollback message by reading
// tagged fields from the input stream until tag 0 (end of message) is seen.
// Rollback declares no fields of its own, so everything lands in unknownFields.
private Rollback(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks the end of the message.
done = true;
break;
default: {
// Preserve unrecognized fields for round-tripping; stop if the
// parser signals end-of-group.
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Attach whatever was read (even on failure) and freeze extensions.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Rollback_descriptor; |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
275 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
189 |
result.success = iface.appendBatch(args.events);
return result;
}
}
}
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
public AsyncProcessor(I iface) {
super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
}
protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
super(iface, getProcessMap(processMap));
}
private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase,?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
processMap.put("append", new append()); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
218 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
153 |
return (new Client(prot)).recv_appendBatch();
}
}
}
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
public Processor(I iface) {
super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
}
protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
super(iface, getProcessMap(processMap));
}
private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
processMap.put("append", new append()); |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
388 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
756 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1010 |
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it is not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
// Wire field id and field name as declared in the Thrift IDL.
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
897 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1689 |
// Fluent setter for the success field; returns this for call chaining.
public append_result setSuccess(Status success) {
this.success = success;
return this;
}
// Clears the success field (Thrift models "unset" as null for object fields).
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
// Framework hook: only clearing is meaningful, since presence is tracked by nullness.
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
// Generic, reflection-style setter used by the Thrift runtime.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((Status)value);
}
break;
}
}
// Generic, reflection-style getter used by the Thrift runtime.
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof append_result) |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1067 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1859 |
// Standard-scheme deserializer: reads fields in wire order until the STOP
// marker, filling in struct.success when field 0 arrives as an I32 enum value
// and skipping anything with an unexpected id or type.
public void read(org.apache.thrift.protocol.TProtocol iprot, append_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
// Enums travel on the wire as i32; map back to the Status constant.
struct.success = org.apache.flume.thrift.Status.findByValue(iprot.readI32());
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, append_result struct) throws org.apache.thrift.TException { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
893 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1147 |
java.lang.StringBuilder sb = new java.lang.StringBuilder("close_args(");
boolean first = true;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
// (close_args has no fields, so there is nothing to validate)
}
// Java serialization hook: encode the struct using the compact Thrift protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
// Java serialization hook: decode the struct using the compact Thrift protocol.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class close_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { |
File |
Project |
Line |
org/apache/flume/sink/hbase/HBaseSink.java |
Flume NG HBase Sink |
316 |
org/apache/flume/sink/hbase2/HBase2Sink.java |
Flume NG HBase2 Sink |
331 |
List<Increment> incs = new LinkedList<Increment>();
try {
txn.begin();
if (serializer instanceof BatchAware) {
((BatchAware) serializer).onBatchStart();
}
long i = 0;
for (; i < batchSize; i++) {
Event event = channel.take();
if (event == null) {
if (i == 0) {
status = Status.BACKOFF;
sinkCounter.incrementBatchEmptyCount();
} else {
sinkCounter.incrementBatchUnderflowCount();
}
break;
} else {
serializer.initialize(event, columnFamily);
actions.addAll(serializer.getActions());
incs.addAll(serializer.getIncrements());
}
}
if (i == batchSize) {
sinkCounter.incrementBatchCompleteCount();
}
sinkCounter.addToEventDrainAttemptCount(i);
putEventsAndCommit(actions, incs, txn);
} catch (Throwable e) {
try {
txn.rollback();
} catch (Exception e2) {
logger.error("Exception in rollback. Rollback might not have been " +
"successful.", e2);
}
logger.error("Failed to commit transaction." +
"Transaction rolled back.", e); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1031 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1412 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1823 |
org/apache/flume/source/scribe/LogEntry.java |
Flume Scribe Source |
385 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
531 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
942 |
sb.append(this.success);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
// (nothing to validate for this struct; method kept for the Thrift contract)
}
// Java serialization hook: encode the struct using the compact Thrift protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
// Java serialization hook: decode the struct using the compact Thrift protocol.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class append_resultStandardSchemeFactory implements SchemeFactory { |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5101 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5814 |
// Protobuf-generated boilerplate for the Rollback message.
public com.google.protobuf.Parser<Rollback> getParserForType() {
return PARSER;
}
// Rollback has no fields, so there is nothing to initialize.
private void initFields() {
}
// Memoized tri-state: -1 = not computed, 0 = invalid, 1 = valid.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// No required fields, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// getSerializedSize() must run first to populate memoized sizes.
getSerializedSize();
getUnknownFields().writeTo(output);
}
// Memoized serialized size: -1 = not computed yet.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
// Only unknown fields contribute, since Rollback declares none of its own.
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization goes through the protobuf-provided replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.flume.channel.file.proto.ProtosFactory.Rollback parseFrom( |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
96 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
138 |
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
// Factory that the async Thrift runtime uses to mint AsyncClient instances,
// binding each one to the shared client manager and protocol factory.
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
private org.apache.thrift.async.TAsyncClientManager clientManager;
private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
this.clientManager = clientManager;
this.protocolFactory = protocolFactory;
}
public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
return new AsyncClient(protocolFactory, clientManager, transport);
}
}
// Direct constructor: wires the client to its protocol factory, manager,
// and non-blocking transport.
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
super(protocolFactory, clientManager, transport);
}
public void append(ThriftFlumeEvent evt, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws org.apache.thrift.TException { |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
70 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
60 |
public void appendBatch(List<ThriftFlumeEvent> events, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
// Factory producing synchronous Client instances for this Thrift service.
public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
public Factory() {}
// Single-protocol overload: the same protocol is used for input and output.
public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
return new Client(prot);
}
// Separate input/output protocol overload.
public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
return new Client(iprot, oprot);
}
}
// Uses one protocol for both directions of the RPC conversation.
public Client(org.apache.thrift.protocol.TProtocol prot)
{
super(prot, prot);
}
public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
super(iprot, oprot);
}
public Status append(ThriftFlumeEvent event) throws org.apache.thrift.TException |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
868 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1122 |
/**
 * Orders close_args instances. The struct has no fields, so any two instances
 * of the same concrete class compare equal; otherwise ordering falls back to
 * class-name comparison (standard Thrift-generated behavior).
 */
public int compareTo(close_args other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  // No fields to compare, so same-class instances are always equal.
  return 0;
}
// Maps a wire-level field id to its _Fields constant, or null if unknown.
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Deserializes this struct from the protocol via the scheme dispatcher.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
// Serializes this struct to the protocol via the scheme dispatcher.
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("close_args("); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
137 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
104 |
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "appendBatch failed: unknown result");
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
// Builds AsyncClient instances that share an async client manager and a
// protocol factory; one factory is typically configured per endpoint.
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
private org.apache.thrift.async.TAsyncClientManager clientManager;
private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
this.clientManager = clientManager;
this.protocolFactory = protocolFactory;
}
// Creates a client bound to the supplied non-blocking transport.
public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
return new AsyncClient(protocolFactory, clientManager, transport);
}
}
// Delegates all collaborators to the TAsyncClient base class.
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
super(protocolFactory, clientManager, transport);
}
public void append(ThriftFlumeEvent event, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
96 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
105 |
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
// Factory for asynchronous service clients; holds the shared client manager
// and protocol factory used for every client it creates.
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
private org.apache.thrift.async.TAsyncClientManager clientManager;
private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
this.clientManager = clientManager;
this.protocolFactory = protocolFactory;
}
// One AsyncClient per transport connection.
public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
return new AsyncClient(protocolFactory, clientManager, transport);
}
}
// Constructor simply forwards everything to TAsyncClient.
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
super(protocolFactory, clientManager, transport);
}
public void append(ThriftFlumeEvent evt, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws org.apache.thrift.TException { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
43 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
70 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
60 |
public void close(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
// Factory producing blocking (synchronous) Client instances.
public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
public Factory() {}
// Shared protocol for both request and response.
public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
return new Client(prot);
}
// Distinct input/output protocols (e.g. duplex transports).
public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
return new Client(iprot, oprot);
}
}
// Same protocol instance used for reading and writing.
public Client(org.apache.thrift.protocol.TProtocol prot)
{
super(prot, prot);
}
public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
super(iprot, oprot);
}
public void append(ThriftFlumeEvent evt) throws org.apache.thrift.TException |
File |
Project |
Line |
org/apache/flume/thrift/ThriftFlumeEvent.java |
Flume NG SDK |
93 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
434 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
808 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1177 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1600 |
org/apache/flume/source/scribe/LogEntry.java |
Flume Scribe Source |
93 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
296 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
719 |
default:
return null;
}
}
/**
 * Finds the _Fields constant that matches fieldId, throwing an
 * IllegalArgumentException if no such field exists.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Finds the _Fields constant that matches name, or null if it is not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
// Wire-level Thrift field id for this enum constant.
private final short _thriftId;
// IDL field name for this enum constant.
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.HEADERS, new org.apache.thrift.meta_data.FieldMetaData("headers", org.apache.thrift.TFieldRequirementType.REQUIRED, |
File |
Project |
Line |
org/apache/flume/serialization/LineDeserializer.java |
Flume NG Core |
74 |
org/apache/flume/sink/solr/morphline/BlobDeserializer.java |
Flume NG Morphline Solr Sink |
96 |
return EventBuilder.withBody(line, outputCharset);
}
}
/**
 * Reads a batch of line events in one call.
 *
 * @param numEvents Maximum number of events to return.
 * @return List of events for the lines read; shorter than {@code numEvents}
 *         when the underlying stream is exhausted first.
 * @throws IOException if reading from the underlying stream fails.
 */
@Override
public List<Event> readEvents(int numEvents) throws IOException {
  ensureOpen();
  List<Event> events = Lists.newLinkedList();
  int remaining = numEvents;
  // Pull one event at a time until the quota is met or the stream runs dry.
  while (remaining > 0) {
    Event event = readEvent();
    if (event == null) {
      break;
    }
    events.add(event);
    remaining--;
  }
  return events;
}
// Records the current stream position so a later reset() can return to it.
@Override
public void mark() throws IOException {
ensureOpen();
in.mark();
}
// Rewinds the underlying stream to the last mark() position.
@Override
public void reset() throws IOException {
ensureOpen();
in.reset();
}
// Idempotent close: resets to the last mark before closing, presumably so
// data read but not committed is re-delivered next time — TODO confirm
// against the caller's transaction semantics.
@Override
public void close() throws IOException {
if (isOpen) {
reset();
in.close();
isOpen = false;
}
}
// Guard used by every public method: fail fast once closed.
private void ensureOpen() {
if (!isOpen) {
throw new IllegalStateException("Serializer has been closed");
}
}
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
621 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
903 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1157 |
}
// Java-serialization hook: writes this Thrift struct using the compact
// protocol over a stream transport.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
// writeObject may only throw IOException, so wrap the Thrift failure.
throw new java.io.IOException(te);
}
}
// Java-serialization hook: mirror of writeObject for deserialization.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class append_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { |
File |
Project |
Line |
org/apache/flume/thrift/ThriftFlumeEvent.java |
Flume NG SDK |
422 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
660 |
throw new org.apache.thrift.protocol.TProtocolException("Required field 'body' was not present! Struct: " + toString());
}
// check for sub-struct validity
}
// Serializes this Thrift struct for Java serialization via compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
// Rewrap: the ObjectOutputStream contract only allows IOException here.
throw new java.io.IOException(te);
}
}
// Deserializes this Thrift struct for Java serialization via compact protocol.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class ThriftFlumeEventStandardSchemeFactory implements SchemeFactory { |
File |
Project |
Line |
org/apache/flume/thrift/ThriftFlumeEvent.java |
Flume NG SDK |
425 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
662 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1041 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1422 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1833 |
org/apache/flume/source/scribe/LogEntry.java |
Flume Scribe Source |
395 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
541 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
952 |
}
// Java-serialization bridge: delegate to the Thrift compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
// Only IOException is permitted by the serialization contract.
throw new java.io.IOException(te);
}
}
// Counterpart of writeObject: read the struct back with the same protocol.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class ThriftFlumeEventStandardSchemeFactory implements SchemeFactory { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
812 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1066 |
return new close_args(this);
}
// close_args has no fields, so there is nothing to clear.
@Override
public void clear() {
}
// No fields exist, so every switch below is intentionally empty.
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
}
}
// Unreachable for any real field id: the struct declares no fields.
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof close_args) |
File |
Project |
Line |
org/apache/flume/channel/kafka/KafkaChannel.java |
flume-kafka-channel |
657 |
org/apache/flume/sink/kafka/KafkaSink.java |
Flume Kafka Sink |
476 |
if (parseAsFlumeEvent) {
if (!tempOutStream.isPresent()) {
tempOutStream = Optional.of(new ByteArrayOutputStream());
}
if (!writer.isPresent()) {
writer = Optional.of(new
SpecificDatumWriter<AvroFlumeEvent>(AvroFlumeEvent.class));
}
tempOutStream.get().reset();
AvroFlumeEvent e = new AvroFlumeEvent(
toCharSeqMap(event.getHeaders()),
ByteBuffer.wrap(event.getBody()));
encoder = EncoderFactory.get()
.directBinaryEncoder(tempOutStream.get(), encoder);
writer.get().write(e, encoder);
encoder.flush();
bytes = tempOutStream.get().toByteArray();
} else {
bytes = event.getBody();
}
return bytes;
}
private Event deserializeValue(byte[] value, boolean parseAsFlumeEvent) throws IOException { |
File |
Project |
Line |
org/apache/flume/sink/hbase/SimpleRowKeyGenerator.java |
Flume NG HBase Sink |
29 |
org/apache/flume/sink/hbase2/SimpleRowKeyGenerator.java |
Flume NG HBase2 Sink |
29 |
/**
 * Utility methods that generate simple HBase row keys by appending a unique
 * suffix (UUID, random long, or timestamp) to a caller-supplied prefix.
 *
 * <p>All keys are encoded as UTF-8 bytes. The {@code throws
 * UnsupportedEncodingException} clauses are retained for source compatibility
 * with existing callers, but can no longer actually be thrown because
 * {@link String#getBytes(java.nio.charset.Charset)} does not perform a
 * by-name charset lookup.
 */
public class SimpleRowKeyGenerator {

  /** Returns {@code prefix} followed by a random type-4 UUID, as UTF-8 bytes. */
  public static byte[] getUUIDKey(String prefix) throws UnsupportedEncodingException {
    return (prefix + UUID.randomUUID().toString())
        .getBytes(java.nio.charset.StandardCharsets.UTF_8);
  }

  /** Returns {@code prefix} followed by a uniformly random long, as UTF-8 bytes. */
  public static byte[] getRandomKey(String prefix) throws UnsupportedEncodingException {
    // ThreadLocalRandom avoids allocating (and seeding) a fresh Random on
    // every call, which the previous implementation did.
    return (prefix + java.util.concurrent.ThreadLocalRandom.current().nextLong())
        .getBytes(java.nio.charset.StandardCharsets.UTF_8);
  }

  /** Returns {@code prefix} followed by the current wall-clock millis, as UTF-8 bytes. */
  public static byte[] getTimestampKey(String prefix) throws UnsupportedEncodingException {
    return (prefix + System.currentTimeMillis())
        .getBytes(java.nio.charset.StandardCharsets.UTF_8);
  }

  /** Returns {@code prefix} followed by {@link System#nanoTime()}, as UTF-8 bytes. */
  public static byte[] getNanoTimestampKey(String prefix) throws UnsupportedEncodingException {
    return (prefix + System.nanoTime())
        .getBytes(java.nio.charset.StandardCharsets.UTF_8);
  }
}
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
658 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
578 |
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fields, other.fields);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("ThriftFlumeEvent("); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftFlumeEvent.java |
Flume NG SDK |
372 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
620 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1002 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1383 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1794 |
org/apache/flume/source/scribe/LogEntry.java |
Flume Scribe Source |
348 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
502 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
913 |
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.body, other.body);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("ThriftFlumeEvent("); |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5221 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5934 |
org.apache.flume.channel.file.proto.ProtosFactory.Rollback.class, org.apache.flume.channel.file.proto.ProtosFactory.Rollback.Builder.class);
}
// Construct using org.apache.flume.channel.file.proto.ProtosFactory.Rollback.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Nested-builder constructor used when this builder is a child of a parent
// message builder.
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Rollback has no message fields, so there are no field builders to force.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets the builder to its default (empty) state.
public Builder clear() {
super.clear();
return this;
}
// Produces an independent copy carrying the current partial state.
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Rollback_descriptor; |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
961 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1753 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
872 |
// Field-wise equality: append_result's only field is 'success'; two results
// are equal when both have it unset, or both have it set to equal values.
public boolean equals(append_result that) {
if (that == null)
return false;
boolean this_present_success = true && this.isSetSuccess();
boolean that_present_success = true && that.isSetSuccess();
if (this_present_success || that_present_success) {
// One side set and the other unset means not equal.
if (!(this_present_success && that_present_success))
return false;
if (!this.success.equals(that.success))
return false;
}
return true;
}
// Hash built from (isSet flag, value) so it stays consistent with equals().
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_success = true && (isSetSuccess());
list.add(present_success);
if (present_success)
list.add(success.getValue());
return list.hashCode();
}
@Override
public int compareTo(append_result other) { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
92 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
388 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
756 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1010 |
default:
return null;
}
}
/**
 * Finds the _Fields constant that matches fieldId, throwing an
 * IllegalArgumentException if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Finds the _Fields constant that matches name, or null if it is not found.
 */
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
// Wire-level Thrift field id for this constant.
private final short _thriftId;
// IDL field name for this constant.
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
} |
File |
Project |
Line |
org/apache/flume/sink/hbase/HBaseSinkConfigurationConstants.java |
Flume NG HBase Sink |
27 |
org/apache/flume/sink/hbase2/HBase2SinkConfigurationConstants.java |
Flume NG HBase2 Sink |
27 |
public class HBaseSinkConfigurationConstants {
/**
* The Hbase table which the sink should write to.
*/
public static final String CONFIG_TABLE = "table";
/**
* The column family which the sink should use.
*/
public static final String CONFIG_COLUMN_FAMILY = "columnFamily";
/**
* Maximum number of events the sink should take from the channel per
* transaction, if available.
*/
public static final String CONFIG_BATCHSIZE = "batchSize";
/**
* The fully qualified class name of the serializer the sink should use.
*/
public static final String CONFIG_SERIALIZER = "serializer";
/**
* Configuration to pass to the serializer.
*/
public static final String CONFIG_SERIALIZER_PREFIX = CONFIG_SERIALIZER + ".";
public static final String CONFIG_TIMEOUT = "timeout";
public static final String CONFIG_ENABLE_WAL = "enableWal";
public static final boolean DEFAULT_ENABLE_WAL = true;
public static final long DEFAULT_TIMEOUT = 60000;
public static final String CONFIG_KEYTAB = "kerberosKeytab";
public static final String CONFIG_PRINCIPAL = "kerberosPrincipal";
public static final String ZK_QUORUM = "zookeeperQuorum";
public static final String ZK_ZNODE_PARENT = "znodeParent";
public static final String DEFAULT_ZK_ZNODE_PARENT =
HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT;
public static final String CONFIG_COALESCE_INCREMENTS = "coalesceIncrements";
public static final Boolean DEFAULT_COALESCE_INCREMENTS = false;
public static final int DEFAULT_MAX_CONSECUTIVE_FAILS = 10;
public static final String CONFIG_MAX_CONSECUTIVE_FAILS = "maxConsecutiveFails"; |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
125 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1672 |
private Checkpoint(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 13: {
bitField0_ |= 0x00000001;
version_ = input.readSFixed32();
break;
}
case 17: { |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3392 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5363 |
private TransactionEventHeader(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 13: {
bitField0_ |= 0x00000001;
type_ = input.readSFixed32();
break;
} |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
663 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
583 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
875 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1129 |
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("ThriftFlumeEvent("); |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
723 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
896 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1150 |
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try { |
File |
Project |
Line |
org/apache/flume/api/ThriftRpcClient.java |
Flume NG SDK |
117 |
org/apache/flume/api/ThriftRpcClient.java |
Flume NG SDK |
172 |
} catch (Throwable e) {
if (e instanceof ExecutionException) {
Throwable cause = e.getCause();
if (cause instanceof EventDeliveryException) {
throw (EventDeliveryException) cause;
} else if (cause instanceof TimeoutException) {
throw new EventDeliveryException("Append call timeout", cause);
}
}
destroyedClient = true;
// If destroy throws, we still don't want to reuse the client, so mark it
// as destroyed before we actually do.
if (client != null) {
connectionManager.destroy(client);
}
if (e instanceof Error) {
throw (Error) e;
} else if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
throw new EventDeliveryException("Failed to send event. ", e);
} finally {
if (client != null && !destroyedClient) {
connectionManager.checkIn(client);
}
}
} |
File |
Project |
Line |
org/apache/flume/channel/kafka/KafkaChannel.java |
flume-kafka-channel |
236 |
org/apache/flume/sink/kafka/KafkaSink.java |
Flume Kafka Sink |
393 |
counter = new KafkaChannelCounter(getName());
}
}
// We can remove this once the properties are officially deprecated
private void translateOldProps(Context ctx) {
if (!(ctx.containsKey(TOPIC_CONFIG))) {
ctx.put(TOPIC_CONFIG, ctx.getString("topic"));
logger.warn("{} is deprecated. Please use the parameter {}", "topic", TOPIC_CONFIG);
}
// Broker List
// If there is no value we need to check and set the old param and log a warning message
if (!(ctx.containsKey(BOOTSTRAP_SERVERS_CONFIG))) {
String brokerList = ctx.getString(BROKER_LIST_FLUME_KEY);
if (brokerList == null || brokerList.isEmpty()) {
throw new ConfigurationException("Bootstrap Servers must be specified");
} else {
ctx.put(BOOTSTRAP_SERVERS_CONFIG, brokerList);
logger.warn("{} is deprecated. Please use the parameter {}",
BROKER_LIST_FLUME_KEY, BOOTSTRAP_SERVERS_CONFIG);
}
}
// GroupId
// If there is an old Group Id set, then use that if no groupId is set.
if (!(ctx.containsKey(KAFKA_CONSUMER_PREFIX + ConsumerConfig.GROUP_ID_CONFIG))) { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
720 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1031 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1412 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1823 |
org/apache/flume/source/scribe/LogEntry.java |
Flume Scribe Source |
385 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
531 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
942 |
sb.append(this.fields);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
125 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1125 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1672 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3392 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4568 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5363 |
private Checkpoint(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 13: {
bitField0_ |= 0x00000001; |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2675 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6877 |
private LogFileEncryption(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001; |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
896 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1150 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1034 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1415 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
1826 |
org/apache/flume/source/scribe/LogEntry.java |
Flume Scribe Source |
388 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
534 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
945 |
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
929 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
1183 |
public void read(org.apache.thrift.protocol.TProtocol iprot, close_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, close_args struct) throws org.apache.thrift.TException { |
File |
Project |
Line |
org/apache/flume/sink/hbase/HBaseSink.java |
Flume NG HBase Sink |
165 |
org/apache/flume/sink/hbase2/HBase2Sink.java |
Flume NG HBase2 Sink |
172 |
}
})) {
throw new IOException("Table " + tableName
+ " has no such column family " + Bytes.toString(columnFamily));
}
} catch (Exception e) {
//Get getTableDescriptor also throws IOException, so catch the IOException
//thrown above or by the getTableDescriptor() call.
sinkCounter.incrementConnectionFailedCount();
throw new FlumeException("Error getting column family from HBase."
+ "Please verify that the table " + tableName + " and Column Family, "
+ Bytes.toString(columnFamily) + " exists in HBase, and the"
+ " current user has permissions to access that table.", e);
}
super.start();
sinkCounter.incrementConnectionCreatedCount();
sinkCounter.start();
}
@Override
public void stop() {
try {
if (table != null) {
table.close();
}
table = null;
} catch (IOException e) {
throw new FlumeException("Error closing table.", e);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2675 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3968 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6101 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6877 |
private LogFileEncryption(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: { |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
175 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
207 |
args.setEvent(event);
args.write(prot);
prot.writeMessageEnd();
}
public Status getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_append(); |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
125 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1125 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1672 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2675 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3392 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3968 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4568 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5363 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6101 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6877 |
private Checkpoint(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 13: { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
133 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
162 |
args.setEvt(evt);
args.write(prot);
prot.writeMessageEnd();
}
public Void getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new java.lang.IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return null;
}
} |
File |
Project |
Line |
org/apache/flume/channel/kafka/KafkaChannel.java |
flume-kafka-channel |
395 |
org/apache/flume/source/kafka/KafkaSource.java |
Flume Kafka Source |
639 |
KafkaConsumer<String, byte[]> client) {
Map<TopicPartition, OffsetAndMetadata> offsets = null;
List<PartitionInfo> partitions = client.partitionsFor(topicStr);
if (partitions != null) {
offsets = new HashMap<>();
for (PartitionInfo partition : partitions) {
TopicPartition key = new TopicPartition(topicStr, partition.partition());
OffsetAndMetadata offsetAndMetadata = client.committed(key);
if (offsetAndMetadata != null) {
offsets.put(key, offsetAndMetadata);
}
}
}
return offsets;
}
private Map<TopicPartition, OffsetAndMetadata> getZookeeperOffsets(
KafkaZkClient zkClient, KafkaConsumer<String, byte[]> consumer) { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
645 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
686 |
private static class append_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<append_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, append_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // EVT
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.evt = new ThriftFlumeEvent(); |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1155 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4598 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5388 |
count_ = input.readSFixed32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_ActiveLog_descriptor; |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1735 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3427 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4006 |
backupCheckpointWriteOrderID_ = input.readSFixed64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_LogFileMetaData_descriptor; |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2710 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6907 |
parameters_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_LogFileEncryption_descriptor; |
File |
Project |
Line |
org/apache/flume/sink/hdfs/HDFSTextSerializer.java |
Flume NG HDFS Sink |
43 |
org/apache/flume/sink/hdfs/HDFSWritableSerializer.java |
Flume NG HDFS Sink |
42 |
return Text.class;
}
@Override
public Iterable<Record> serialize(Event e) {
Object key = getKey(e);
Object value = getValue(e);
return Collections.singletonList(new Record(key, value));
}
private Object getKey(Event e) {
// Write the data to HDFS
String timestamp = e.getHeaders().get("timestamp");
long eventStamp;
if (timestamp == null) {
eventStamp = System.currentTimeMillis();
} else {
eventStamp = Long.valueOf(timestamp);
}
return new LongWritable(eventStamp);
}
private Object getValue(Event e) {
return makeText(e); |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
324 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
375 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
237 |
append_result result = new append_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, append_args args, org.apache.thrift.async.AsyncMethodCallback<Status> resultHandler) throws TException { |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1155 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1735 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2710 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3427 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4006 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4598 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5388 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6907 |
count_ = input.readSFixed32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_ActiveLog_descriptor; |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
756 |
org/apache/flume/thrift/ThriftFlumeEvent.java |
Flume NG SDK |
449 |
private static class ThriftFlumeEventStandardScheme extends org.apache.thrift.scheme.StandardScheme<ThriftFlumeEvent> {
public void read(org.apache.thrift.protocol.TProtocol iprot, ThriftFlumeEvent struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // TIMESTAMP
if (schemeField.type == org.apache.thrift.protocol.TType.I64) { |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
661 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
702 |
struct.setEvtIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, append_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.evt != null) { |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
191 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
488 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Checkpoint_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Checkpoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.Checkpoint.class, org.apache.flume.channel.file.proto.ProtosFactory.Checkpoint.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1170 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1361 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_ActiveLog_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_ActiveLog_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.ActiveLog.class, org.apache.flume.channel.file.proto.ProtosFactory.ActiveLog.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1750 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2081 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_LogFileMetaData_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_LogFileMetaData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.LogFileMetaData.class, org.apache.flume.channel.file.proto.ProtosFactory.LogFileMetaData.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2725 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2994 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_LogFileEncryption_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_LogFileEncryption_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.LogFileEncryption.class, org.apache.flume.channel.file.proto.ProtosFactory.LogFileEncryption.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3442 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3661 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_TransactionEventHeader_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_TransactionEventHeader_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.TransactionEventHeader.class, org.apache.flume.channel.file.proto.ProtosFactory.TransactionEventHeader.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4021 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4218 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Put_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Put_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.Put.class, org.apache.flume.channel.file.proto.ProtosFactory.Put.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4613 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4804 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Take_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Take_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.Take.class, org.apache.flume.channel.file.proto.ProtosFactory.Take.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5078 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5212 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Rollback_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Rollback_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.Rollback.class, org.apache.flume.channel.file.proto.ProtosFactory.Rollback.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5403 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5566 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Commit_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_Commit_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.Commit.class, org.apache.flume.channel.file.proto.ProtosFactory.Commit.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5791 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5925 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_TransactionEventFooter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_TransactionEventFooter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.TransactionEventFooter.class, org.apache.flume.channel.file.proto.ProtosFactory.TransactionEventFooter.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6152 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6365 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_FlumeEvent_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_FlumeEvent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.FlumeEvent.class, org.apache.flume.channel.file.proto.ProtosFactory.FlumeEvent.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6922 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
7163 |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_FlumeEventHeader_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_FlumeEventHeader_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.flume.channel.file.proto.ProtosFactory.FlumeEventHeader.class, org.apache.flume.channel.file.proto.ProtosFactory.FlumeEventHeader.Builder.class);
} |
File |
Project |
Line |
org/apache/flume/channel/kafka/KafkaChannel.java |
flume-kafka-channel |
412 |
org/apache/flume/source/kafka/KafkaSource.java |
Flume Kafka Source |
656 |
KafkaZkClient zkClient, KafkaConsumer<String, byte[]> consumer) {
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
List<PartitionInfo> partitions = consumer.partitionsFor(topicStr);
for (PartitionInfo partition : partitions) {
TopicPartition topicPartition = new TopicPartition(topicStr, partition.partition());
Option<Object> optionOffset = zkClient.getConsumerOffset(groupId, topicPartition);
if (optionOffset.nonEmpty()) {
Long offset = (Long) optionOffset.get();
OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(offset);
offsets.put(topicPartition, offsetAndMetadata);
}
}
return offsets;
} |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1156 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
1736 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2711 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3428 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4007 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
4599 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5064 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5389 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5777 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
6908 |
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.flume.channel.file.proto.ProtosFactory.internal_static_ActiveLog_descriptor; |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
680 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
2267 |
parsedMessage = (org.apache.flume.channel.file.proto.ProtosFactory.Checkpoint) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required sfixed32 version = 1;
private int version_ ;
/**
* <code>required sfixed32 version = 1;</code>
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required sfixed32 version = 1;</code>
*/
public int getVersion() {
return version_;
}
/**
* <code>required sfixed32 version = 1;</code>
*/
public Builder setVersion(int value) {
bitField0_ |= 0x00000001;
version_ = value;
onChanged();
return this;
}
/**
* <code>required sfixed32 version = 1;</code>
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = 0;
onChanged();
return this;
}
// required sfixed64 writeOrderID = 2;
private long writeOrderID_ ; |
File |
Project |
Line |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
3792 |
org/apache/flume/channel/file/proto/ProtosFactory.java |
Flume NG file-based channel |
5671 |
parsedMessage = (org.apache.flume.channel.file.proto.ProtosFactory.TransactionEventHeader) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required sfixed32 type = 1;
private int type_ ;
/**
* <code>required sfixed32 type = 1;</code>
*/
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required sfixed32 type = 1;</code>
*/
public int getType() {
return type_;
}
/**
* <code>required sfixed32 type = 1;</code>
*/
public Builder setType(int value) {
bitField0_ |= 0x00000001;
type_ = value;
onChanged();
return this;
}
/**
* <code>required sfixed32 type = 1;</code>
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000001);
type_ = 0;
onChanged();
return this;
} |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
823 |
org/apache/flume/thrift/ThriftFlumeEvent.java |
Flume NG SDK |
484 |
struct.setFieldsIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, ThriftFlumeEvent struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC); |
File |
Project |
Line |
org/apache/flume/sink/hbase/SimpleHbaseEventSerializer.java |
Flume NG HBase Sink |
80 |
org/apache/flume/sink/hbase2/SimpleHBase2EventSerializer.java |
Flume NG HBase2 Sink |
85 |
}
plCol = payloadColumn.getBytes(Charsets.UTF_8);
}
if (incColumn != null && !incColumn.isEmpty()) {
incCol = incColumn.getBytes(Charsets.UTF_8);
}
}
@Override
public void configure(ComponentConfiguration conf) {
}
@Override
public void initialize(Event event, byte[] cf) {
this.payload = event.getBody();
this.cf = cf;
}
@Override
public List<Row> getActions() throws FlumeException {
List<Row> actions = new LinkedList<Row>(); |
File |
Project |
Line |
org/apache/flume/channel/file/EventQueueBackingStoreFileV3.java |
Flume NG file-based channel |
144 |
org/apache/flume/channel/file/EventQueueBackingStoreFileV3.java |
Flume NG file-based channel |
183 |
if (count != 0) {
ProtosFactory.ActiveLog.Builder activeLogBuilder =
ProtosFactory.ActiveLog.newBuilder();
activeLogBuilder.setLogFileID(logFileID);
activeLogBuilder.setCount(count);
checkpointBuilder.addActiveLogs(activeLogBuilder.build());
}
}
FileOutputStream outputStream = new FileOutputStream(metaDataFile);
try {
checkpointBuilder.build().writeDelimitedTo(outputStream);
outputStream.getChannel().force(true);
} finally {
try {
outputStream.close();
} catch (IOException e) {
LOG.warn("Unable to close " + metaDataFile, e);
}
} |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
64 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
370 |
FIELDS((short)6, "fields");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // TIMESTAMP
return TIMESTAMP; |
File |
Project |
Line |
com/cloudera/flume/handlers/thrift/ThriftFlumeEvent.java |
Flume legacy Thrift Source |
730 |
com/cloudera/flume/handlers/thrift/ThriftFlumeEventServer.java |
Flume legacy Thrift Source |
621 |
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try { |
File |
Project |
Line |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
180 |
org/apache/flume/thrift/ThriftSourceProtocol.java |
Flume NG SDK |
212 |
org/apache/flume/source/scribe/Scribe.java |
Flume Scribe Source |
147 |
public Status getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_append(); |
File |
Project |
Line |
org/apache/flume/sink/solr/morphline/BlobDeserializer.java |
Flume NG Morphline Solr Sink |
76 |
org/apache/flume/sink/solr/morphline/BlobHandler.java |
Flume NG Morphline Solr Sink |
80 |
ByteArrayOutputStream blob = null;
byte[] buf = new byte[Math.min(maxBlobLength, DEFAULT_BUFFER_SIZE)];
int blobLength = 0;
int n = 0;
while ((n = in.read(buf, 0, Math.min(buf.length, maxBlobLength - blobLength))) != -1) {
if (blob == null) {
blob = new ByteArrayOutputStream(n);
}
blob.write(buf, 0, n);
blobLength += n;
if (blobLength >= maxBlobLength) {
LOGGER.warn("File length exceeds maxBlobLength ({}), truncating BLOB event!", |