0001
0002
0003
0004
0005
0006 package org.apache.spark.sql.execution.datasources.parquet.test.avro;
/**
 * Autogenerated by Avro.
 *
 * DO NOT EDIT DIRECTLY — this class is generated from the Avro schema embedded
 * in {@code SCHEMA$} below; change the schema source and regenerate instead.
 *
 * Record with two fields:
 * <ul>
 *   <li>{@code strings_column} — a non-nullable array of strings
 *       (items carry {@code "avro.java.string":"String"}, so they map to
 *       {@code java.lang.String} rather than {@code Utf8});</li>
 *   <li>{@code maybe_ints_column} — a nullable ({@code ["null", array<int>]})
 *       array of ints.</li>
 * </ul>
 * NOTE(review): the package name suggests this is a fixture for Spark SQL's
 * Parquet/Avro interoperability tests — confirm against the test suite.
 */
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class AvroNonNullableArrays extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
  /** Parsed writer schema for this record; index order here fixes the field indices used by {@link #get} and {@link #put}. */
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroNonNullableArrays\",\"namespace\":\"org.apache.spark.sql.execution.datasources.parquet.test.avro\",\"fields\":[{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"maybe_ints_column\",\"type\":[\"null\",{\"type\":\"array\",\"items\":\"int\"}]}]}");

  /** Returns the Avro schema for this class (static accessor mirroring {@link #getSchema()}). */
  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }

  // Fields are public for the Avro runtime's direct access; callers should use
  // the getters/setters or the Builder instead (hence @Deprecated).
  @Deprecated public java.util.List<java.lang.String> strings_column;
  @Deprecated public java.util.List<java.lang.Integer> maybe_ints_column;

  /**
   * Default constructor. Note that this does not initialize fields to their
   * default values from the schema. If that is desired then one should use
   * {@link #newBuilder()}.
   */
  public AvroNonNullableArrays() {}

  /**
   * All-args constructor.
   * @param strings_column the non-nullable string array field
   * @param maybe_ints_column the nullable int array field (may be null)
   */
  public AvroNonNullableArrays(java.util.List<java.lang.String> strings_column, java.util.List<java.lang.Integer> maybe_ints_column) {
    this.strings_column = strings_column;
    this.maybe_ints_column = maybe_ints_column;
  }

  /** Returns the schema this record was generated from. */
  public org.apache.avro.Schema getSchema() { return SCHEMA$; }

  /**
   * Positional field accessor used by the Avro runtime during serialization.
   * Index 0 = strings_column, 1 = maybe_ints_column (must match SCHEMA$ order).
   */
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return strings_column;
    case 1: return maybe_ints_column;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Positional field mutator used by the Avro runtime during deserialization.
   * Index 0 = strings_column, 1 = maybe_ints_column (must match SCHEMA$ order).
   * Unchecked casts are safe because the runtime supplies schema-conformant values.
   */
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0: strings_column = (java.util.List<java.lang.String>)value$; break;
    case 1: maybe_ints_column = (java.util.List<java.lang.Integer>)value$; break;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Gets the value of the 'strings_column' field.
   */
  public java.util.List<java.lang.String> getStringsColumn() {
    return strings_column;
  }

  /**
   * Sets the value of the 'strings_column' field.
   * @param value the value to set.
   */
  public void setStringsColumn(java.util.List<java.lang.String> value) {
    this.strings_column = value;
  }

  /**
   * Gets the value of the 'maybe_ints_column' field.
   */
  public java.util.List<java.lang.Integer> getMaybeIntsColumn() {
    return maybe_ints_column;
  }

  /**
   * Sets the value of the 'maybe_ints_column' field.
   * @param value the value to set.
   */
  public void setMaybeIntsColumn(java.util.List<java.lang.Integer> value) {
    this.maybe_ints_column = value;
  }

  /** Creates a new AvroNonNullableArrays RecordBuilder with all fields unset. */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder newBuilder() {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder();
  }

  /** Creates a new AvroNonNullableArrays RecordBuilder by copying an existing Builder. */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder other) {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder(other);
  }

  /** Creates a new AvroNonNullableArrays RecordBuilder by copying an existing record instance. */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays other) {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder(other);
  }

  /**
   * RecordBuilder for AvroNonNullableArrays instances. Tracks which fields have
   * been explicitly set via {@code fieldSetFlags()} so that {@link #build()} can
   * fall back to schema defaults for unset fields.
   */
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<AvroNonNullableArrays>
    implements org.apache.avro.data.RecordBuilder<AvroNonNullableArrays> {

    private java.util.List<java.lang.String> strings_column;
    private java.util.List<java.lang.Integer> maybe_ints_column;

    /** Creates a new Builder with all fields unset; use the static newBuilder() factories. */
    private Builder() {
      super(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.SCHEMA$);
    }

    /** Creates a Builder by deep-copying the set fields of an existing Builder. */
    private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.strings_column)) {
        this.strings_column = data().deepCopy(fields()[0].schema(), other.strings_column);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.maybe_ints_column)) {
        this.maybe_ints_column = data().deepCopy(fields()[1].schema(), other.maybe_ints_column);
        fieldSetFlags()[1] = true;
      }
    }

    /** Creates a Builder by deep-copying the valid fields of an existing record instance. */
    private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays other) {
      super(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.SCHEMA$);
      if (isValidValue(fields()[0], other.strings_column)) {
        this.strings_column = data().deepCopy(fields()[0].schema(), other.strings_column);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.maybe_ints_column)) {
        this.maybe_ints_column = data().deepCopy(fields()[1].schema(), other.maybe_ints_column);
        fieldSetFlags()[1] = true;
      }
    }

    /** Gets the value of the 'strings_column' field. */
    public java.util.List<java.lang.String> getStringsColumn() {
      return strings_column;
    }

    /** Sets the value of the 'strings_column' field; validates against the schema and marks it as set. */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder setStringsColumn(java.util.List<java.lang.String> value) {
      validate(fields()[0], value);
      this.strings_column = value;
      fieldSetFlags()[0] = true;
      return this;
    }

    /** Checks whether the 'strings_column' field has been set. */
    public boolean hasStringsColumn() {
      return fieldSetFlags()[0];
    }

    /** Clears the value of the 'strings_column' field (build() will then use the schema default, if any). */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder clearStringsColumn() {
      strings_column = null;
      fieldSetFlags()[0] = false;
      return this;
    }

    /** Gets the value of the 'maybe_ints_column' field. */
    public java.util.List<java.lang.Integer> getMaybeIntsColumn() {
      return maybe_ints_column;
    }

    /** Sets the value of the 'maybe_ints_column' field; validates against the schema and marks it as set. */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder setMaybeIntsColumn(java.util.List<java.lang.Integer> value) {
      validate(fields()[1], value);
      this.maybe_ints_column = value;
      fieldSetFlags()[1] = true;
      return this;
    }

    /** Checks whether the 'maybe_ints_column' field has been set. */
    public boolean hasMaybeIntsColumn() {
      return fieldSetFlags()[1];
    }

    /** Clears the value of the 'maybe_ints_column' field (build() will then use the schema default, if any). */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroNonNullableArrays.Builder clearMaybeIntsColumn() {
      maybe_ints_column = null;
      fieldSetFlags()[1] = false;
      return this;
    }

    /**
     * Builds the record: explicitly-set fields are used as-is; unset fields fall
     * back to the schema default via defaultValue(). Any failure (e.g. a missing
     * required default) is wrapped in an AvroRuntimeException.
     */
    @Override
    @SuppressWarnings(value="unchecked")
    public AvroNonNullableArrays build() {
      try {
        AvroNonNullableArrays record = new AvroNonNullableArrays();
        record.strings_column = fieldSetFlags()[0] ? this.strings_column : (java.util.List<java.lang.String>) defaultValue(fields()[0]);
        record.maybe_ints_column = fieldSetFlags()[1] ? this.maybe_ints_column : (java.util.List<java.lang.Integer>) defaultValue(fields()[1]);
        return record;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
}