/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.record.compiler;

import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * @deprecated Replaced by <a href="https://avro.apache.org/">Avro</a>.
 */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JVector extends JCompType {

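  // Identifier bookkeeping for generated code: vector handling nests (a
  // vector<vector<int>> emits a loop inside a loop), so every generated
  // local variable is suffixed with the current nesting depth to keep names
  // unique. incrLevel()/decrLevel() bracket each generated block, and
  // getId("x") returns "x" plus the current depth, e.g. "x1", "x2", ...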
  private static int level = 0;

  private static String getId(String id) { return id+getLevel(); }

  private static String getLevel() { return Integer.toString(level); }

  private static void incrLevel() { level++; }

  private static void decrLevel() { level--; }

  private JType type;

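  // Java binding for a vector field: a DDL vector<T> is represented as a
  // java.util.ArrayList of the element's Java wrapper type (e.g. vector<int>
  // becomes java.util.ArrayList<Integer>). Each gen* method below appends
  // one fragment of the generated record class to the CodeBuffer.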
  class JavaVector extends JavaCompType {

    private JType.JavaType element;

    JavaVector(JType.JavaType t) {
      super("java.util.ArrayList<"+t.getWrapperType()+">",
          "Vector", "java.util.ArrayList<"+t.getWrapperType()+">",
          "TypeID.RIOType.VECTOR");
      element = t;
    }

    String getTypeIDObjectString() {
      return "new org.apache.hadoop.record.meta.VectorTypeID(" +
          element.getTypeIDObjectString() + ")";
    }

    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
      element.genSetRTIFilter(cb, nestedStructMap);
    }

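    // Emits the element-wise comparison used by the generated record's
    // compareTo(). A sketch of the emitted shape, with the Consts.RIO_PREFIX
    // prefix and the nesting-depth suffix omitted from variable names:
    //
    //   {
    //     int len1 = <fname>.size();
    //     int len2 = <other>.size();
    //     for (int vidx = 0; vidx < len1 && vidx < len2; vidx++) {
    //       <elem> e1 = <fname>.get(vidx);
    //       <elem> e2 = <other>.get(vidx);
    //       <compare e1 and e2 into ret>
    //       if (ret != 0) { return ret; }
    //     }
    //     ret = (len1 - len2);   // equal prefixes: shorter vector first
    //   }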
    void genCompareTo(CodeBuffer cb, String fname, String other) {
      cb.append("{\n");
      incrLevel();
      cb.append("int "+getId(Consts.RIO_PREFIX + "len1")+" = "+fname+
          ".size();\n");
      cb.append("int "+getId(Consts.RIO_PREFIX + "len2")+" = "+other+
          ".size();\n");
      cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; "+
          getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len1")+
          " && "+getId(Consts.RIO_PREFIX + "vidx")+"<"+
          getId(Consts.RIO_PREFIX + "len2")+"; "+
          getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e1")+
          " = "+fname+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e2")+
          " = "+other+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
      element.genCompareTo(cb, getId(Consts.RIO_PREFIX + "e1"),
          getId(Consts.RIO_PREFIX + "e2"));
      cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " +
          Consts.RIO_PREFIX + "ret; }\n");
      cb.append("}\n");
      cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "len1")+
          " - "+getId(Consts.RIO_PREFIX + "len2")+");\n");
      decrLevel();
      cb.append("}\n");
    }

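    // Emits deserialization code (plus a declaration of <fname> when decl is
    // set). startVector() returns an Index cursor that reports when the
    // vector is exhausted, so no element count appears in the generated
    // source. Sketch of the emitted shape, prefixes and suffixes omitted:
    //
    //   {
    //     org.apache.hadoop.record.Index vidx = <rin>.startVector("<tag>");
    //     <fname> = new java.util.ArrayList<...>();
    //     for (; !vidx.done(); vidx.incr()) {
    //       <declare and read element e>
    //       <fname>.add(e);
    //     }
    //     <rin>.endVector("<tag>");
    //   }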
    void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
      if (decl) {
        cb.append(getType()+" "+fname+";\n");
      }
      cb.append("{\n");
      incrLevel();
      cb.append("org.apache.hadoop.record.Index "+
          getId(Consts.RIO_PREFIX + "vidx")+" = " +
          Consts.RECORD_INPUT + ".startVector(\""+tag+"\");\n");
      cb.append(fname+"=new "+getType()+"();\n");
      cb.append("for (; !"+getId(Consts.RIO_PREFIX + "vidx")+".done(); " +
          getId(Consts.RIO_PREFIX + "vidx")+".incr()) {\n");
      element.genReadMethod(cb, getId(Consts.RIO_PREFIX + "e"),
          getId(Consts.RIO_PREFIX + "e"), true);
      cb.append(fname+".add("+getId(Consts.RIO_PREFIX + "e")+");\n");
      cb.append("}\n");
      cb.append(Consts.RECORD_INPUT + ".endVector(\""+tag+"\");\n");
      decrLevel();
      cb.append("}\n");
    }

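    // Emits serialization code, the mirror image of genReadMethod. Sketch of
    // the emitted shape, prefixes and suffixes omitted:
    //
    //   {
    //     <rout>.startVector(<fname>, "<tag>");
    //     int len = <fname>.size();
    //     for (int vidx = 0; vidx < len; vidx++) {
    //       <elem> e = <fname>.get(vidx);
    //       <write element e>
    //     }
    //     <rout>.endVector(<fname>, "<tag>");
    //   }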
    void genWriteMethod(CodeBuffer cb, String fname, String tag) {
      cb.append("{\n");
      incrLevel();
      cb.append(Consts.RECORD_OUTPUT + ".startVector("+fname+",\""+tag+"\");\n");
      cb.append("int "+getId(Consts.RIO_PREFIX + "len")+" = "+fname+".size();\n");
      cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; " +
          getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len")+
          "; "+getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e")+" = "+
          fname+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
      element.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "e"),
          getId(Consts.RIO_PREFIX + "e"));
      cb.append("}\n");
      cb.append(Consts.RECORD_OUTPUT + ".endVector("+fname+",\""+tag+"\");\n");
      decrLevel();
      cb.append("}\n");
    }

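    // Emits code that skips over one serialized vector inside a raw byte
    // buffer b at offset s with l bytes remaining: read the vint-encoded
    // element count, step past the vint itself, then slurp each element in
    // turn. Used for raw field handling without full deserialization.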
    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
      cb.append("{\n");
      incrLevel();
      cb.append("int "+getId("vi")+
          " = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
      cb.append("int "+getId("vz")+
          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi")+");\n");
      cb.append(s+"+="+getId("vz")+"; "+l+"-="+getId("vz")+";\n");
      cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
          " < "+getId("vi")+"; "+getId("vidx")+"++)");
      element.genSlurpBytes(cb, b, s, l);
      decrLevel();
      cb.append("}\n");
    }

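    // Emits code comparing two serialized vectors (b1/s1/l1 vs. b2/s2/l2)
    // without deserializing: compare up to min(len1, len2) elements in
    // place, then order by length. This keeps the generated raw comparator
    // consistent with the compareTo() emitted above.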
    void genCompareBytes(CodeBuffer cb) {
      cb.append("{\n");
      incrLevel();
      cb.append("int "+getId("vi1")+
          " = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
      cb.append("int "+getId("vi2")+
          " = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
      cb.append("int "+getId("vz1")+
          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi1")+");\n");
      cb.append("int "+getId("vz2")+
          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi2")+");\n");
      cb.append("s1+="+getId("vz1")+"; s2+="+getId("vz2")+
          "; l1-="+getId("vz1")+"; l2-="+getId("vz2")+";\n");
      cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
          " < "+getId("vi1")+" && "+getId("vidx")+" < "+getId("vi2")+
          "; "+getId("vidx")+"++)");
      element.genCompareBytes(cb);
      // The lengths are known to differ here, so emit -1 or 1, never 0:
      // with equal prefixes, the shorter vector must sort first.
      cb.append("if ("+getId("vi1")+" != "+getId("vi2")+
          ") { return ("+getId("vi1")+"<"+getId("vi2")+")?-1:1; }\n");
      decrLevel();
      cb.append("}\n");
    }
  }

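  // C++ binding for a vector field: a DDL vector<T> maps onto
  // ::std::vector< T' >, where T' is the element's C++ type. Only the
  // runtime type information (RTI) hooks are generated from here.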
  class CppVector extends CppCompType {

    private JType.CppType element;

    CppVector(JType.CppType t) {
      super("::std::vector< "+t.getType()+" >");
      element = t;
    }

    String getTypeIDObjectString() {
      return "new ::hadoop::VectorTypeID(" +
          element.getTypeIDObjectString() + ")";
    }

    void genSetRTIFilter(CodeBuffer cb) {
      element.genSetRTIFilter(cb);
    }
  }

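  // Illustrative only (a hypothetical sketch, not code from this compiler's
  // tests): the rcc DDL parser constructs one JVector per vector-typed
  // field, so a field declared as "vector<int> nums;" corresponds roughly to
  //
  //   JVector vecType = new JVector(new JInt());
  //
  // after which code generation reads back the Java/C++/C bindings installed
  // by the constructor below.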
  /**
   * Creates a new instance of JVector.
   *
   * @param t the element type of the vector
   */
  public JVector(JType t) {
    type = t;
    setJavaType(new JavaVector(t.getJavaType()));
    setCppType(new CppVector(t.getCppType()));
    setCType(new CCompType());
  }

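  // A vector's type signature wraps the element signature in square
  // brackets; assuming JInt's signature is "i", a vector<int> field yields
  // "[i]" and a vector<vector<int>> yields "[[i]]".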
  String getSignature() {
    return "[" + type.getSignature() + "]";
  }
}