0001
0002
0003
0004
0005
0006 package org.apache.spark.sql.execution.datasources.parquet.test.avro;
/**
 * Avro test record holding a single {@code array<array<int>>} field
 * ({@code int_arrays_column}), used by Spark SQL's Parquet data source test suite to
 * exercise nested-array schemas.
 *
 * <p>NOTE(review): this class is machine-generated by the Avro specific compiler
 * ({@code @AvroGenerated}) — do not edit by hand; regenerate from the Avro schema instead.
 * The class-level {@code @SuppressWarnings("all")} and the deprecated public field are
 * standard Avro-compiler output.
 */
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class AvroArrayOfArray extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
  /** Parsed writer schema: one field, {@code int_arrays_column}, of type array of array of int. */
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroArrayOfArray\",\"namespace\":\"org.apache.spark.sql.execution.datasources.parquet.test.avro\",\"fields\":[{\"name\":\"int_arrays_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"array\",\"items\":\"int\"}}}]}");

  /** Returns the record schema without needing an instance. */
  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }

  // Direct field access is deprecated by the Avro compiler; prefer
  // getIntArraysColumn()/setIntArraysColumn(). Kept public for Avro's reflective access.
  @Deprecated public java.util.List<java.util.List<java.lang.Integer>> int_arrays_column;

  /**
   * Default constructor. Does NOT initialize the field to its schema default;
   * use {@link #newBuilder()} if schema defaults are desired.
   */
  public AvroArrayOfArray() {}

  /** All-args constructor. */
  public AvroArrayOfArray(java.util.List<java.util.List<java.lang.Integer>> int_arrays_column) {
    this.int_arrays_column = int_arrays_column;
  }

  /** Per-instance schema accessor required by {@link org.apache.avro.generic.GenericContainer}. */
  public org.apache.avro.Schema getSchema() { return SCHEMA$; }

  /**
   * Positional field read used by Avro's DatumWriter during serialization.
   * Index 0 is the only field in this record.
   */
  public java.lang.Object get(int field$) {
    switch (field$) {
      case 0: return int_arrays_column;
      default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Positional field write used by Avro's DatumReader during deserialization.
   * The unchecked cast is safe because Avro supplies values matching SCHEMA$.
   */
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
      case 0: int_arrays_column = (java.util.List<java.util.List<java.lang.Integer>>)value$; break;
      default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Gets the value of the 'int_arrays_column' field.
   */
  public java.util.List<java.util.List<java.lang.Integer>> getIntArraysColumn() {
    return int_arrays_column;
  }

  /**
   * Sets the value of the 'int_arrays_column' field.
   * @param value the value to set.
   */
  public void setIntArraysColumn(java.util.List<java.util.List<java.lang.Integer>> value) {
    this.int_arrays_column = value;
  }

  /** Creates a new AvroArrayOfArray RecordBuilder with all fields unset. */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder newBuilder() {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder();
  }

  /** Creates a new AvroArrayOfArray RecordBuilder by copying an existing Builder. */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder other) {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder(other);
  }

  /** Creates a new AvroArrayOfArray RecordBuilder by copying an existing record instance. */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray other) {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder(other);
  }

  /**
   * RecordBuilder for AvroArrayOfArray instances. Tracks which fields were explicitly
   * set (fieldSetFlags) so that build() can fall back to schema defaults for unset fields.
   */
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<AvroArrayOfArray>
    implements org.apache.avro.data.RecordBuilder<AvroArrayOfArray> {

    private java.util.List<java.util.List<java.lang.Integer>> int_arrays_column;

    /** Creates a new Builder; private — callers go through newBuilder(). */
    private Builder() {
      super(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.SCHEMA$);
    }

    /** Creates a Builder by deep-copying the fields set on an existing Builder. */
    private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.int_arrays_column)) {
        // deepCopy so later mutation of the source builder cannot leak into this one
        this.int_arrays_column = data().deepCopy(fields()[0].schema(), other.int_arrays_column);
        fieldSetFlags()[0] = true;
      }
    }

    /** Creates a Builder by deep-copying the fields of an existing record instance. */
    private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray other) {
      super(org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.SCHEMA$);
      if (isValidValue(fields()[0], other.int_arrays_column)) {
        this.int_arrays_column = data().deepCopy(fields()[0].schema(), other.int_arrays_column);
        fieldSetFlags()[0] = true;
      }
    }

    /** Gets the value of the 'int_arrays_column' field. */
    public java.util.List<java.util.List<java.lang.Integer>> getIntArraysColumn() {
      return int_arrays_column;
    }

    /** Sets the value of the 'int_arrays_column' field and marks it as set. */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder setIntArraysColumn(java.util.List<java.util.List<java.lang.Integer>> value) {
      validate(fields()[0], value);
      this.int_arrays_column = value;
      fieldSetFlags()[0] = true;
      return this;
    }

    /** Checks whether the 'int_arrays_column' field has been set. */
    public boolean hasIntArraysColumn() {
      return fieldSetFlags()[0];
    }

    /** Clears the value of the 'int_arrays_column' field. */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.AvroArrayOfArray.Builder clearIntArraysColumn() {
      int_arrays_column = null;
      fieldSetFlags()[0] = false;
      return this;
    }

    @Override
    @SuppressWarnings(value="unchecked")
    public AvroArrayOfArray build() {
      try {
        AvroArrayOfArray record = new AvroArrayOfArray();
        // Unset fields fall back to the schema default (defaultValue throws if none exists).
        record.int_arrays_column = fieldSetFlags()[0] ? this.int_arrays_column : (java.util.List<java.util.List<java.lang.Integer>>) defaultValue(fields()[0]);
        return record;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
}