/**
 * Autogenerated by Avro
 *
 * DO NOT EDIT DIRECTLY
 */
package org.apache.spark.sql.execution.datasources.parquet.test.avro;
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class ParquetAvroCompat extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.execution.datasources.parquet.test.avro\",\"fields\":[{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}}]}");
  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
  @Deprecated public java.util.List<java.lang.String> strings_column;
  @Deprecated public java.util.Map<java.lang.String,java.lang.Integer> string_to_int_column;
  @Deprecated public java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> complex_column;

  /**
   * Default constructor.  Note that this does not initialize fields
   * to their default values from the schema.  If that is desired then
   * one should use <code>newBuilder()</code>.
   */
  public ParquetAvroCompat() {}

  /**
   * All-args constructor.
   */
  public ParquetAvroCompat(java.util.List<java.lang.String> strings_column, java.util.Map<java.lang.String,java.lang.Integer> string_to_int_column, java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> complex_column) {
    this.strings_column = strings_column;
    this.string_to_int_column = string_to_int_column;
    this.complex_column = complex_column;
  }

  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
  // Used by DatumWriter.  Applications should not call.
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return strings_column;
    case 1: return string_to_int_column;
    case 2: return complex_column;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }
  // Used by DatumReader.  Applications should not call.
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0: strings_column = (java.util.List<java.lang.String>)value$; break;
    case 1: string_to_int_column = (java.util.Map<java.lang.String,java.lang.Integer>)value$; break;
    case 2: complex_column = (java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>>)value$; break;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Gets the value of the 'strings_column' field.
   */
  public java.util.List<java.lang.String> getStringsColumn() {
    return strings_column;
  }

  /**
   * Sets the value of the 'strings_column' field.
   * @param value the value to set.
   */
  public void setStringsColumn(java.util.List<java.lang.String> value) {
    this.strings_column = value;
  }

  /**
   * Gets the value of the 'string_to_int_column' field.
   */
  public java.util.Map<java.lang.String,java.lang.Integer> getStringToIntColumn() {
    return string_to_int_column;
  }

  /**
   * Sets the value of the 'string_to_int_column' field.
   * @param value the value to set.
   */
  public void setStringToIntColumn(java.util.Map<java.lang.String,java.lang.Integer> value) {
    this.string_to_int_column = value;
  }

  /**
   * Gets the value of the 'complex_column' field.
   */
  public java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> getComplexColumn() {
    return complex_column;
  }

  /**
   * Sets the value of the 'complex_column' field.
   * @param value the value to set.
   */
  public void setComplexColumn(java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> value) {
    this.complex_column = value;
  }

  /** Creates a new ParquetAvroCompat RecordBuilder */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder newBuilder() {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder();
  }

  /** Creates a new ParquetAvroCompat RecordBuilder by copying an existing Builder */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder other) {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder(other);
  }

  /** Creates a new ParquetAvroCompat RecordBuilder by copying an existing ParquetAvroCompat instance */
  public static org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat other) {
    return new org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder(other);
  }

  /**
   * RecordBuilder for ParquetAvroCompat instances.
   */
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<ParquetAvroCompat>
    implements org.apache.avro.data.RecordBuilder<ParquetAvroCompat> {

    private java.util.List<java.lang.String> strings_column;
    private java.util.Map<java.lang.String,java.lang.Integer> string_to_int_column;
    private java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> complex_column;

    /** Creates a new Builder */
    private Builder() {
      super(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.SCHEMA$);
    }

    /** Creates a Builder by copying an existing Builder */
    private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.strings_column)) {
        this.strings_column = data().deepCopy(fields()[0].schema(), other.strings_column);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.string_to_int_column)) {
        this.string_to_int_column = data().deepCopy(fields()[1].schema(), other.string_to_int_column);
        fieldSetFlags()[1] = true;
      }
      if (isValidValue(fields()[2], other.complex_column)) {
        this.complex_column = data().deepCopy(fields()[2].schema(), other.complex_column);
        fieldSetFlags()[2] = true;
      }
    }

    /** Creates a Builder by copying an existing ParquetAvroCompat instance */
    private Builder(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat other) {
      super(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.SCHEMA$);
      if (isValidValue(fields()[0], other.strings_column)) {
        this.strings_column = data().deepCopy(fields()[0].schema(), other.strings_column);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.string_to_int_column)) {
        this.string_to_int_column = data().deepCopy(fields()[1].schema(), other.string_to_int_column);
        fieldSetFlags()[1] = true;
      }
      if (isValidValue(fields()[2], other.complex_column)) {
        this.complex_column = data().deepCopy(fields()[2].schema(), other.complex_column);
        fieldSetFlags()[2] = true;
      }
    }

    /** Gets the value of the 'strings_column' field */
    public java.util.List<java.lang.String> getStringsColumn() {
      return strings_column;
    }

    /** Sets the value of the 'strings_column' field */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder setStringsColumn(java.util.List<java.lang.String> value) {
      validate(fields()[0], value);
      this.strings_column = value;
      fieldSetFlags()[0] = true;
      return this;
    }

    /** Checks whether the 'strings_column' field has been set */
    public boolean hasStringsColumn() {
      return fieldSetFlags()[0];
    }

    /** Clears the value of the 'strings_column' field */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder clearStringsColumn() {
      strings_column = null;
      fieldSetFlags()[0] = false;
      return this;
    }

    /** Gets the value of the 'string_to_int_column' field */
    public java.util.Map<java.lang.String,java.lang.Integer> getStringToIntColumn() {
      return string_to_int_column;
    }

    /** Sets the value of the 'string_to_int_column' field */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder setStringToIntColumn(java.util.Map<java.lang.String,java.lang.Integer> value) {
      validate(fields()[1], value);
      this.string_to_int_column = value;
      fieldSetFlags()[1] = true;
      return this;
    }

    /** Checks whether the 'string_to_int_column' field has been set */
    public boolean hasStringToIntColumn() {
      return fieldSetFlags()[1];
    }

    /** Clears the value of the 'string_to_int_column' field */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder clearStringToIntColumn() {
      string_to_int_column = null;
      fieldSetFlags()[1] = false;
      return this;
    }

    /** Gets the value of the 'complex_column' field */
    public java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> getComplexColumn() {
      return complex_column;
    }

    /** Sets the value of the 'complex_column' field */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder setComplexColumn(java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> value) {
      validate(fields()[2], value);
      this.complex_column = value;
      fieldSetFlags()[2] = true;
      return this;
    }

    /** Checks whether the 'complex_column' field has been set */
    public boolean hasComplexColumn() {
      return fieldSetFlags()[2];
    }

    /** Clears the value of the 'complex_column' field */
    public org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder clearComplexColumn() {
      complex_column = null;
      fieldSetFlags()[2] = false;
      return this;
    }

    @Override
    @SuppressWarnings(value="unchecked")
    public ParquetAvroCompat build() {
      try {
        ParquetAvroCompat record = new ParquetAvroCompat();
        record.strings_column = fieldSetFlags()[0] ? this.strings_column : (java.util.List<java.lang.String>) defaultValue(fields()[0]);
        record.string_to_int_column = fieldSetFlags()[1] ? this.string_to_int_column : (java.util.Map<java.lang.String,java.lang.Integer>) defaultValue(fields()[1]);
        record.complex_column = fieldSetFlags()[2] ? this.complex_column : (java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>>) defaultValue(fields()[2]);
        return record;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
}
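
/*
 * Usage sketch (not part of the Avro-generated source above; added for
 * illustration only). It shows how a ParquetAvroCompat record could be
 * assembled through the generated Builder; the field values are made up.
 *
 *   ParquetAvroCompat record = ParquetAvroCompat.newBuilder()
 *       .setStringsColumn(java.util.Arrays.asList("a", "b"))
 *       .setStringToIntColumn(java.util.Collections.singletonMap("a", 1))
 *       .setComplexColumn(java.util.Collections.<String, java.util.List<Nested>>emptyMap())
 *       .build();
 *
 * Every field is set explicitly here because the schema declares no default
 * values, so build() would otherwise fail when it tries to resolve defaults
 * for unset fields.
 */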