/// <summary>
/// Creates the Java code generator for a map type.
/// </summary>
/// <param name="_enclosing">the enclosing JMap record-compiler type</param>
/// <param name="key">generator for the map's key type</param>
/// <param name="value">generator for the map's value type</param>
internal JavaMap(JMap _enclosing, JType.JavaType key, JType.JavaType value)
    : base(_enclosing)
{
    // Sharpen pattern: keep an explicit reference to the outer instance.
    this._enclosing = _enclosing;
    this.value = value;
    this.key = key;
}
/// <summary>
/// Creates the Java code generator for a vector type.
/// </summary>
/// <param name="_enclosing">the enclosing JVector record-compiler type</param>
/// <param name="t">generator for the vector's element type</param>
internal JavaVector(JVector _enclosing, JType.JavaType t)
    : base(_enclosing)
{
    // Sharpen pattern: keep an explicit reference to the outer instance.
    this._enclosing = _enclosing;
    this.element = t;
}
/// <summary>
/// Generates the Java source for this record type and writes it to
/// <c>destDir/&lt;package-path&gt;/&lt;name&gt;.java</c>.
/// The emitted class extends org.apache.hadoop.record.Record and contains the
/// type-info statics, field declarations, constructors, serialize/deserialize,
/// compareTo/equals/clone/hashCode, signature(), and a raw-bytes Comparator.
/// </summary>
/// <param name="destDir">root output directory; package sub-directories are created beneath it</param>
/// <param name="options">compiler options — not consulted by this generator; TODO confirm against callers</param>
/// <exception cref="System.IO.IOException">
/// if the package directory cannot be created, is not a directory, or the file cannot be written
/// </exception>
internal virtual void GenCode(string destDir, AList<string> options)
{
    // Map the module name ("a.b.c") onto a directory path ("a/b/c") under destDir.
    string pkg = this.module;
    string pkgpath = pkg.ReplaceAll("\\.", "/");
    FilePath pkgdir = new FilePath(destDir, pkgpath);
    FilePath jfile = new FilePath(pkgdir, this.name + ".java");
    if (!pkgdir.Exists())
    {
        // create the pkg directory
        bool ret = pkgdir.Mkdirs();
        if (!ret)
        {
            // BUGFIX: message previously read "Cannnot".
            throw new IOException("Cannot create directory: " + pkgpath);
        }
    }
    else
    {
        if (!pkgdir.IsDirectory())
        {
            // not a directory
            throw new IOException(pkgpath + " is not a directory.");
        }
    }
    CodeBuffer cb = new CodeBuffer();
    cb.Append("// File generated by hadoop record compiler. Do not edit.\n");
    cb.Append("package " + this.module + ";\n\n");
    cb.Append("public class " + this.name + " extends org.apache.hadoop.record.Record {\n");
    // type information declarations
    cb.Append("private static final " + "org.apache.hadoop.record.meta.RecordTypeInfo " + Consts.RtiVar + ";\n");
    cb.Append("private static " + "org.apache.hadoop.record.meta.RecordTypeInfo " + Consts.RtiFilter + ";\n");
    cb.Append("private static int[] " + Consts.RtiFilterFields + ";\n");
    // static init for type information
    cb.Append("static {\n");
    cb.Append(Consts.RtiVar + " = " + "new org.apache.hadoop.record.meta.RecordTypeInfo(\"" + this.name + "\");\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenStaticTypeInfo(cb, name);
    }
    cb.Append("}\n\n");
    // field definitions
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenDecl(cb, name);
    }
    // default constructor
    cb.Append("public " + this.name + "() { }\n");
    // constructor taking every field; parameters are comma-separated
    cb.Append("public " + this.name + "(\n");
    bool firstParam = true;
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        if (!firstParam)
        {
            cb.Append(",\n");
        }
        firstParam = false;
        jf.GetType().GenConstructorParam(cb, jf.GetName());
    }
    cb.Append(") {\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenConstructorSet(cb, name);
    }
    cb.Append("}\n");
    // getter/setter for type info
    cb.Append("public static org.apache.hadoop.record.meta.RecordTypeInfo" + " getTypeInfo() {\n");
    cb.Append("return " + Consts.RtiVar + ";\n");
    cb.Append("}\n");
    cb.Append("public static void setTypeFilter(" + "org.apache.hadoop.record.meta.RecordTypeInfo rti) {\n");
    cb.Append("if (null == rti) return;\n");
    cb.Append(Consts.RtiFilter + " = rti;\n");
    cb.Append(Consts.RtiFilterFields + " = null;\n");
    // set RTIFilter for nested structs.
    // To prevent setting up the type filter for the same struct more than once,
    // we use a hash map to keep track of what we've set.
    IDictionary<string, int> nestedStructMap = new Dictionary<string, int>();
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        JType.JavaType type = jf.GetType();
        type.GenSetRTIFilter(cb, nestedStructMap);
    }
    cb.Append("}\n");
    // setupRtiFields()
    this.GenSetupRtiFields(cb);
    // getters/setters for member variables
    // BUGFIX: these loops (and every one below) previously called jf_1.GetName()/
    // jf_1.GetType() — the out-of-scope variable from the nestedStructMap loop —
    // instead of the current element.
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenGetSet(cb, name);
    }
    // serialize()
    cb.Append("public void serialize(" + "final org.apache.hadoop.record.RecordOutput " + Consts.RecordOutput
        + ", final String " + Consts.Tag + ")\n" + "throws java.io.IOException {\n");
    cb.Append(Consts.RecordOutput + ".startRecord(this," + Consts.Tag + ");\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenWriteMethod(cb, name, name);
    }
    cb.Append(Consts.RecordOutput + ".endRecord(this," + Consts.Tag + ");\n");
    cb.Append("}\n");
    // deserializeWithoutFilter()
    cb.Append("private void deserializeWithoutFilter(" + "final org.apache.hadoop.record.RecordInput "
        + Consts.RecordInput + ", final String " + Consts.Tag + ")\n" + "throws java.io.IOException {\n");
    cb.Append(Consts.RecordInput + ".startRecord(" + Consts.Tag + ");\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenReadMethod(cb, name, name, false);
    }
    cb.Append(Consts.RecordInput + ".endRecord(" + Consts.Tag + ");\n");
    cb.Append("}\n");
    // deserialize()
    cb.Append("public void deserialize(final " + "org.apache.hadoop.record.RecordInput " + Consts.RecordInput
        + ", final String " + Consts.Tag + ")\n" + "throws java.io.IOException {\n");
    cb.Append("if (null == " + Consts.RtiFilter + ") {\n");
    cb.Append("deserializeWithoutFilter(" + Consts.RecordInput + ", " + Consts.Tag + ");\n");
    cb.Append("return;\n");
    cb.Append("}\n");
    cb.Append("// if we're here, we need to read based on version info\n");
    cb.Append(Consts.RecordInput + ".startRecord(" + Consts.Tag + ");\n");
    cb.Append("setupRtiFields();\n");
    cb.Append("for (int " + Consts.RioPrefix + "i=0; " + Consts.RioPrefix + "i<" + Consts.RtiFilter
        + ".getFieldTypeInfos().size(); " + Consts.RioPrefix + "i++) {\n");
    // one "else if" branch per field, dispatched on the filter-field index (1-based)
    int ct = 0;
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        ct++;
        if (1 != ct)
        {
            cb.Append("else ");
        }
        cb.Append("if (" + ct + " == " + Consts.RtiFilterFields + "[" + Consts.RioPrefix + "i]) {\n");
        type.GenReadMethod(cb, name, name, false);
        cb.Append("}\n");
    }
    if (0 != ct)
    {
        // trailing else: unknown field — skip it using the filter's type info
        cb.Append("else {\n");
        cb.Append("java.util.ArrayList<" + "org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = "
            + "(java.util.ArrayList<" + "org.apache.hadoop.record.meta.FieldTypeInfo>)" + "(" + Consts.RtiFilter
            + ".getFieldTypeInfos());\n");
        cb.Append("org.apache.hadoop.record.meta.Utils.skip(" + Consts.RecordInput + ", " + "typeInfos.get("
            + Consts.RioPrefix + "i).getFieldID(), typeInfos.get(" + Consts.RioPrefix + "i).getTypeID());\n");
        cb.Append("}\n");
    }
    cb.Append("}\n");
    cb.Append(Consts.RecordInput + ".endRecord(" + Consts.Tag + ");\n");
    cb.Append("}\n");
    // compareTo()
    cb.Append("public int compareTo (final Object " + Consts.RioPrefix + "peer_) throws ClassCastException {\n");
    cb.Append("if (!(" + Consts.RioPrefix + "peer_ instanceof " + this.name + ")) {\n");
    cb.Append("throw new ClassCastException(\"Comparing different types of records.\");\n");
    cb.Append("}\n");
    cb.Append(this.name + " " + Consts.RioPrefix + "peer = (" + this.name + ") " + Consts.RioPrefix + "peer_;\n");
    cb.Append("int " + Consts.RioPrefix + "ret = 0;\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenCompareTo(cb, name, Consts.RioPrefix + "peer." + name);
        cb.Append("if (" + Consts.RioPrefix + "ret != 0) return " + Consts.RioPrefix + "ret;\n");
    }
    cb.Append("return " + Consts.RioPrefix + "ret;\n");
    cb.Append("}\n");
    // equals()
    cb.Append("public boolean equals(final Object " + Consts.RioPrefix + "peer_) {\n");
    cb.Append("if (!(" + Consts.RioPrefix + "peer_ instanceof " + this.name + ")) {\n");
    cb.Append("return false;\n");
    cb.Append("}\n");
    cb.Append("if (" + Consts.RioPrefix + "peer_ == this) {\n");
    cb.Append("return true;\n");
    cb.Append("}\n");
    cb.Append(this.name + " " + Consts.RioPrefix + "peer = (" + this.name + ") " + Consts.RioPrefix + "peer_;\n");
    cb.Append("boolean " + Consts.RioPrefix + "ret = false;\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenEquals(cb, name, Consts.RioPrefix + "peer." + name);
        cb.Append("if (!" + Consts.RioPrefix + "ret) return " + Consts.RioPrefix + "ret;\n");
    }
    cb.Append("return " + Consts.RioPrefix + "ret;\n");
    cb.Append("}\n");
    // clone()
    cb.Append("public Object clone() throws CloneNotSupportedException {\n");
    cb.Append(this.name + " " + Consts.RioPrefix + "other = new " + this.name + "();\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenClone(cb, name);
    }
    cb.Append("return " + Consts.RioPrefix + "other;\n");
    cb.Append("}\n");
    // hashCode(): classic 17/37 accumulation over the fields
    cb.Append("public int hashCode() {\n");
    cb.Append("int " + Consts.RioPrefix + "result = 17;\n");
    cb.Append("int " + Consts.RioPrefix + "ret;\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        string name = jf.GetName();
        JType.JavaType type = jf.GetType();
        type.GenHashCode(cb, name);
        cb.Append(Consts.RioPrefix + "result = 37*" + Consts.RioPrefix + "result + " + Consts.RioPrefix + "ret;\n");
    }
    cb.Append("return " + Consts.RioPrefix + "result;\n");
    cb.Append("}\n");
    // signature()
    cb.Append("public static String signature() {\n");
    cb.Append("return \"" + this._enclosing.GetSignature() + "\";\n");
    cb.Append("}\n");
    // raw-bytes Comparator nested class
    cb.Append("public static class Comparator extends" + " org.apache.hadoop.record.RecordComparator {\n");
    cb.Append("public Comparator() {\n");
    cb.Append("super(" + this.name + ".class);\n");
    cb.Append("}\n");
    cb.Append("static public int slurpRaw(byte[] b, int s, int l) {\n");
    cb.Append("try {\n");
    cb.Append("int os = s;\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        JType.JavaType type = jf.GetType();
        type.GenSlurpBytes(cb, "b", "s", "l");
    }
    cb.Append("return (os - s);\n");
    cb.Append("} catch(java.io.IOException e) {\n");
    cb.Append("throw new RuntimeException(e);\n");
    cb.Append("}\n");
    cb.Append("}\n");
    cb.Append("static public int compareRaw(byte[] b1, int s1, int l1,\n");
    cb.Append(" byte[] b2, int s2, int l2) {\n");
    cb.Append("try {\n");
    cb.Append("int os1 = s1;\n");
    foreach (JField<JType.JavaType> jf in this.fields)
    {
        JType.JavaType type = jf.GetType();
        type.GenCompareBytes(cb);
    }
    cb.Append("return (os1 - s1);\n");
    cb.Append("} catch(java.io.IOException e) {\n");
    cb.Append("throw new RuntimeException(e);\n");
    cb.Append("}\n");
    cb.Append("}\n");
    cb.Append("public int compare(byte[] b1, int s1, int l1,\n");
    cb.Append(" byte[] b2, int s2, int l2) {\n");
    cb.Append("int ret = compareRaw(b1,s1,l1,b2,s2,l2);\n");
    // NOTE(review): no trailing "\n" here in the original generator — preserved as-is.
    cb.Append("return (ret == -1)? -1 : ((ret==0)? 1 : 0);");
    cb.Append("}\n");
    cb.Append("}\n\n");
    cb.Append("static {\n");
    cb.Append("org.apache.hadoop.record.RecordComparator.define(" + this.name + ".class, new Comparator());\n");
    cb.Append("}\n");
    cb.Append("}\n");
    // write the buffered source out, always closing the writer
    FileWriter jj = new FileWriter(jfile);
    try
    {
        jj.Write(cb.ToString());
    }
    finally
    {
        jj.Close();
    }
}
/// <summary>
/// Records the Java-side code generator associated with this type.
/// </summary>
/// <param name="jType">the Java type generator to attach</param>
internal virtual void SetJavaType(JType.JavaType jType)
{
    this.javaType = jType;
}