
Commit 4923efe

nikita15p authored and rgoers committed
FLUME-3448 fixed unit test errors related to bump up
Signed-off-by: nikita15p <[email protected]>
1 parent 3c27b3e commit 4923efe

File tree

5 files changed: +24 -23 lines changed

flume-ng-clients/flume-ng-log4jappender/src/test/java/org/apache/flume/clients/log4jappender/TestLog4jAppenderWithAvro.java

Lines changed: 1 addition & 2 deletions
@@ -71,8 +71,7 @@ private static int getFreePort() throws Exception {
   @Before
   public void setUp() throws Exception {
     URL schemaUrl = getClass().getClassLoader().getResource("myrecord.avsc");
-    Files.copy(Resources.newInputStreamSupplier(schemaUrl),
-        new File("/tmp/myrecord.avsc"));
+    Resources.asByteSource(schemaUrl).copyTo(Files.asByteSink(new File("/tmp/myrecord.avsc")));

     port = getFreePort();
     source = new AvroSource();
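
Note: the Guava bump drops the InputSupplier-based Files.copy overload, so the test now composes Resources.asByteSource with Files.asByteSink. A minimal standalone sketch of the same pattern (the temp-dir target is a slight variation on the commit's hard-coded /tmp path):

import com.google.common.io.Files;
import com.google.common.io.Resources;

import java.io.File;
import java.io.IOException;
import java.net.URL;

public class CopyResourceSketch {
  public static void main(String[] args) throws IOException {
    // Locate a classpath resource and copy it to a file in the temp directory.
    URL schemaUrl = CopyResourceSketch.class.getClassLoader().getResource("myrecord.avsc");
    File target = new File(System.getProperty("java.io.tmpdir"), "myrecord.avsc");
    // ByteSource.copyTo(ByteSink) replaces the removed Files.copy(InputSupplier, File).
    Resources.asByteSource(schemaUrl).copyTo(Files.asByteSink(target));
  }
}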

flume-ng-core/src/main/java/org/apache/flume/source/SyslogParser.java

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@
 package org.apache.flume.source;

 import com.google.common.base.Preconditions;
-import com.google.common.cache.Cache;
+import com.google.common.cache.LoadingCache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.collect.Maps;
@@ -59,7 +59,7 @@ public class SyslogParser {
   private static final int RFC5424_PREFIX_LEN = 19;
   private final DateTimeFormatter timeParser;

-  private Cache<String, Long> timestampCache;
+  private LoadingCache<String, Long> timestampCache;

   public SyslogParser() {
     timeParser = DateTimeFormat.forPattern(timePat).withZoneUTC();
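
Note: in current Guava, CacheBuilder.build(CacheLoader) returns a LoadingCache, so the field type is widened to match; a LoadingCache loads missing keys through its CacheLoader without a per-call Callable. A hedged sketch of the pattern with an illustrative loader (not the parser's actual timestamp logic):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class TimestampCacheSketch {
  // build(CacheLoader) yields a LoadingCache rather than a plain Cache.
  private final LoadingCache<String, Long> timestampCache =
      CacheBuilder.newBuilder()
          .maximumSize(1000)
          .build(new CacheLoader<String, Long>() {
            @Override
            public Long load(String key) {
              // Illustrative loader: a real parser would parse the timestamp string here.
              return System.currentTimeMillis();
            }
          });

  public long lookup(String timestamp) {
    // Loads through the CacheLoader on a cache miss.
    return timestampCache.getUnchecked(timestamp);
  }
}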

flume-ng-sinks/flume-hive-sink/src/test/java/org/apache/flume/sink/hive/TestHiveSink.java

Lines changed: 4 additions & 5 deletions
@@ -37,7 +37,6 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -105,8 +104,8 @@ public TestHiveSink() throws Exception {
     TestUtil.setConfValues(conf);

     // 1) prepare hive
-    TxnDbUtil.cleanDb();
-    TxnDbUtil.prepDb();
+    TxnDbUtil.cleanDb(conf);
+    TxnDbUtil.prepDb(conf);

     // 2) Setup Hive client
     SessionState.start(new CliSessionState(conf));
@@ -283,7 +282,7 @@ public void testSingleWriterUseHeaders()

   @Test
   public void testHeartBeat()
-      throws EventDeliveryException, IOException, CommandNeedRetryException {
+      throws EventDeliveryException, IOException {
     int batchSize = 2;
     int batchCount = 3;
     int totalRecords = batchCount * batchSize;
@@ -407,7 +406,7 @@ private static Channel startSink(HiveSink sink, Context context, Channel pChannel
   }

   private void checkRecordCountInTable(int expectedCount, String db, String tbl)
-      throws CommandNeedRetryException, IOException {
+      throws IOException {
     int count = TestUtil.listRecordsInTable(driver, db, tbl).size();
     Assert.assertEquals(expectedCount, count);
   }
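
Note: these edits track the Hive upgrade: CommandNeedRetryException no longer exists, and TxnDbUtil.cleanDb/prepDb now take the HiveConf explicitly. A hedged sketch of the setup sequence as it looks after the bump (the wrapper class and method name are illustrative; the TxnDbUtil and session calls mirror the diff):

import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
import org.apache.hadoop.hive.ql.session.SessionState;

public class HiveTxnTestSetupSketch {
  public static void prepareTransactionDb(HiveConf conf) throws Exception {
    // Newer Hive requires the configuration to be passed in explicitly.
    TxnDbUtil.cleanDb(conf);   // drop any leftover transaction metadata tables
    TxnDbUtil.prepDb(conf);    // recreate them for a fresh test run
    // Start a CLI session against the same configuration, as the test does next.
    SessionState.start(new CliSessionState(conf));
  }
}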

flume-ng-sinks/flume-hive-sink/src/test/java/org/apache/flume/sink/hive/TestHiveWriter.java

Lines changed: 4 additions & 4 deletions
@@ -26,8 +26,8 @@
 import org.apache.flume.instrumentation.SinkCounter;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.streaming.HiveEndPoint;
@@ -99,8 +99,8 @@ public TestHiveWriter() throws Exception {
   @Before
   public void setUp() throws Exception {
     // 1) prepare hive
-    TxnDbUtil.cleanDb();
-    TxnDbUtil.prepDb();
+    TxnDbUtil.cleanDb(conf);
+    TxnDbUtil.prepDb(conf);

     // 1) Setup tables
     TestUtil.dropDB(conf, dbName);
@@ -207,7 +207,7 @@ public void testTxnBatchConsumption() throws Exception {
   }

   private void checkRecordCountInTable(int expectedCount)
-      throws CommandNeedRetryException, IOException {
+      throws IOException {
     int count = TestUtil.listRecordsInTable(driver, dbName, tblName).size();
     Assert.assertEquals(expectedCount, count);
   }
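
Note: with the upgraded Hive, Driver.run and Driver.getResults no longer declare CommandNeedRetryException, so the helpers keep only IOException. A hedged sketch of the record-count check as a standalone helper (parameterized on the Driver; the actual tests delegate to TestUtil.listRecordsInTable):

import java.io.IOException;
import java.util.ArrayList;

import org.apache.hadoop.hive.ql.Driver;
import org.junit.Assert;

public class RecordCountCheckSketch {
  // Only IOException remains checked here; CommandNeedRetryException was removed from Hive.
  static void checkRecordCountInTable(Driver driver, String db, String tbl, int expectedCount)
      throws IOException {
    driver.run("select * from " + db + "." + tbl);
    ArrayList<String> results = new ArrayList<String>();
    driver.getResults(results);
    Assert.assertEquals(expectedCount, results.size());
  }
}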

flume-ng-sinks/flume-hive-sink/src/test/java/org/apache/flume/sink/hive/TestUtil.java

Lines changed: 13 additions & 10 deletions
@@ -28,7 +28,6 @@
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -58,6 +57,17 @@ public static void setConfValues(HiveConf conf) {
     conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr);
     conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
     conf.set("fs.raw.impl", RawFileSystem.class.getName());
+    try {
+      conf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, false);
+      conf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, "jdbc:derby:;databaseName=metastore_db;create=true");
+      conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_DRIVER, "org.apache.derby.jdbc.EmbeddedDriver");
+      conf.setBoolVar(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL, true);
+      conf.setIntVar(HiveConf.ConfVars.METASTORE_SERVER_PORT, 0);
+      conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("java.io.tmpdir"));
+    } catch (Throwable t) {
+      t.printStackTrace();
+    }
+
   }

   public static void createDbAndTable(Driver driver, String databaseName,
@@ -148,7 +158,7 @@ private static String getTablePartsStr2(String[] partNames, List<String> partVal
   }

   public static ArrayList<String> listRecordsInTable(Driver driver, String dbName, String tblName)
-      throws CommandNeedRetryException, IOException {
+      throws IOException {
     driver.run("select * from " + dbName + "." + tblName);
     ArrayList<String> res = new ArrayList<String>();
     driver.getResults(res);
@@ -158,7 +168,7 @@ public static ArrayList<String> listRecordsInTable(Driver driver, String dbName,
   public static ArrayList<String> listRecordsInPartition(Driver driver, String dbName,
                                                          String tblName, String continent,
                                                          String country)
-      throws CommandNeedRetryException, IOException {
+      throws IOException {
     driver.run("select * from " + dbName + "." + tblName + " where continent='"
         + continent + "' and country='" + country + "'");
     ArrayList<String> res = new ArrayList<String>();
@@ -217,15 +227,8 @@ public FileStatus getFileStatus(Path path) throws IOException {
   private static boolean runDDL(Driver driver, String sql) throws QueryFailedException {
     int retryCount = 1; // # of times to retry if first attempt fails
     for (int attempt = 0; attempt <= retryCount; ++attempt) {
-      try {
       driver.run(sql);
-        return true;
-      } catch (CommandNeedRetryException e) {
-        if (attempt == retryCount) {
-          throw new QueryFailedException(sql, e);
-        }
       continue;
-      }
     } // for
     return false;
   }
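
Note: the new block in setConfValues points the test metastore at an embedded Derby database and lets Hive auto-create its schema, so the upgraded metastore can bootstrap without an external schematool run. A hedged, self-contained sketch of the same configuration (the wrapper class and method are illustrative; the ConfVars constants are the ones used in the diff above):

import org.apache.hadoop.hive.conf.HiveConf;

public class EmbeddedMetastoreConfSketch {
  public static HiveConf embeddedDerbyConf() {
    HiveConf conf = new HiveConf();
    // Skip schema version checks and let Hive create the metastore schema on first use.
    conf.setBoolVar(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION, false);
    conf.setBoolVar(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL, true);
    // Embedded Derby database local to the test JVM.
    conf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
        "jdbc:derby:;databaseName=metastore_db;create=true");
    conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_DRIVER,
        "org.apache.derby.jdbc.EmbeddedDriver");
    // Keep the warehouse under the JVM temp dir so tests do not touch real data.
    conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("java.io.tmpdir"));
    return conf;
  }
}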
