/**
 * Autogenerated by Avro
 *
 * DO NOT EDIT DIRECTLY
 */
0006 package org.apache.spark.sql.execution.datasources.parquet.test.avro;
0007 @SuppressWarnings("all")
0008 @org.apache.avro.specific.AvroGenerated
0009 public class AvroMapOfArray extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
0010 public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroMapOfArray\",\"namespace\":\"org.apache.spark.sql.execution.datasources.parquet.test.avro\",\"fields\":[{\"name\":\"string_to_ints_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"int\"},\"avro.java.string\":\"String\"}}]}");
0011 public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
0012 @Deprecated public java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> string_to_ints_column;
0013
0014
0015
0016
0017
0018
0019 public AvroMapOfArray() {}
0020
0021
0022
0023
0024 public AvroMapOfArray(java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> string_to_ints_column) {
0025 this.string_to_ints_column = string_to_ints_column;
0026 }
0027
0028 public org.apache.avro.Schema getSchema() { return SCHEMA$; }
0029
0030 public java.lang.Object get(int field$) {
0031 switch (field$) {
0032 case 0: return string_to_ints_column;
0033 default: throw new org.apache.avro.AvroRuntimeException("Bad index");
0034 }
0035 }
0036
0037 @SuppressWarnings(value="unchecked")
0038 public void put(int field$, java.lang.Object value$) {
0039 switch (field$) {
0040 case 0: string_to_ints_column = (java.util.Map<java.lang.String,java.util.List<java.lang.Integer>>)value$; break;
0041 default: throw new org.apache.avro.AvroRuntimeException("Bad index");
0042 }
0043 }
0044
0045
0046
0047
0048 public java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> getStringToIntsColumn() {
0049 return string_to_ints_column;
0050 }
0051
0052
0053
0054
0055
0056 public void setStringToIntsColumn(java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> value) {
0057 this.string_to_ints_column = value;
0058 }
0059
0060
0061 public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder newBuilder() {
0062 return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder();
0063 }
0064
0065
0066 public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder other) {
0067 return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder(other);
0068 }
0069
0070
0071 public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray other) {
0072 return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder(other);
0073 }
0074
0075
0076
0077
0078 public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<AvroMapOfArray>
0079 implements org.apache.avro.data.RecordBuilder<AvroMapOfArray> {
0080
0081 private java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> string_to_ints_column;
0082
0083
0084 private Builder() {
0085 super(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.SCHEMA$);
0086 }
0087
0088
0089 private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder other) {
0090 super(other);
0091 if (isValidValue(fields()[0], other.string_to_ints_column)) {
0092 this.string_to_ints_column = data().deepCopy(fields()[0].schema(), other.string_to_ints_column);
0093 fieldSetFlags()[0] = true;
0094 }
0095 }
0096
0097
0098 private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray other) {
0099 super(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.SCHEMA$);
0100 if (isValidValue(fields()[0], other.string_to_ints_column)) {
0101 this.string_to_ints_column = data().deepCopy(fields()[0].schema(), other.string_to_ints_column);
0102 fieldSetFlags()[0] = true;
0103 }
0104 }
0105
0106
0107 public java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> getStringToIntsColumn() {
0108 return string_to_ints_column;
0109 }
0110
0111
0112 public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder setStringToIntsColumn(java.util.Map<java.lang.String,java.util.List<java.lang.Integer>> value) {
0113 validate(fields()[0], value);
0114 this.string_to_ints_column = value;
0115 fieldSetFlags()[0] = true;
0116 return this;
0117 }
0118
0119
0120 public boolean hasStringToIntsColumn() {
0121 return fieldSetFlags()[0];
0122 }
0123
0124
0125 public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroMapOfArray.Builder clearStringToIntsColumn() {
0126 string_to_ints_column = null;
0127 fieldSetFlags()[0] = false;
0128 return this;
0129 }
0130
0131 @Override
0132 @SuppressWarnings(value="unchecked")
0133 public AvroMapOfArray build() {
0134 try {
0135 AvroMapOfArray record = new AvroMapOfArray();
0136 record.string_to_ints_column = fieldSetFlags()[0] ? this.string_to_ints_column : (java.util.Map<java.lang.String,java.util.List<java.lang.Integer>>) defaultValue(fields()[0]);
0137 return record;
0138 } catch (Exception e) {
0139 throw new org.apache.avro.AvroRuntimeException(e);
0140 }
0141 }
0142 }
0143 }