/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.spy;

import java.io.IOException;
import java.lang.reflect.Field;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
51+
52+ @ Category ({ RegionServerTests .class , LargeTests .class })
53+ public class TestOpenRegionFailedMemoryLeak {
54+
55+ @ ClassRule
56+ public static final HBaseClassTestRule CLASS_RULE =
57+ HBaseClassTestRule .forClass (TestOpenRegionFailedMemoryLeak .class );
58+
59+ private static final Logger LOG = LoggerFactory .getLogger (TestOpenRegionFailedMemoryLeak .class );
60+
61+ private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility ();
62+
63+ @ AfterClass
64+ public static void tearDown () throws IOException {
65+ EnvironmentEdgeManagerTestHelper .reset ();
66+ LOG .info ("Cleaning test directory: " + TEST_UTIL .getDataTestDir ());
67+ TEST_UTIL .cleanupTestDir ();
68+ }
69+
70+ // make sure the region is successfully closed when the coprocessor config is wrong
71+ @ Test
72+ public void testOpenRegionFailedMemoryLeak () throws Exception {
73+ final ServerName serverName = ServerName .valueOf ("testOpenRegionFailed" , 100 , 42 );
74+ final RegionServerServices rss = spy (TEST_UTIL .createMockRegionServerService (serverName ));
75+
76+ TableDescriptor htd =
77+ TableDescriptorBuilder .newBuilder (TableName .valueOf ("testOpenRegionFailed" ))
78+ .setColumnFamily (ColumnFamilyDescriptorBuilder .of (fam1 ))
79+ .setValue ("COPROCESSOR$1" , "hdfs://test/test.jar|test||" ).build ();
80+
81+ RegionInfo hri = RegionInfoBuilder .newBuilder (htd .getTableName ()).build ();
82+ ScheduledExecutorService executor =
83+ CompatibilitySingletonFactory .getInstance (MetricsExecutor .class ).getExecutor ();
84+ for (int i = 0 ; i < 20 ; i ++) {
85+ try {
86+ HRegion .openHRegion (hri , htd , rss .getWAL (hri ), TEST_UTIL .getConfiguration (), rss , null );
87+ fail ("Should fail otherwise the test will be useless" );
88+ } catch (Throwable t ) {
89+ LOG .info ("Expected exception, continue" , t );
90+ }
91+ }
92+ TimeUnit .SECONDS .sleep (MetricsRegionWrapperImpl .PERIOD );
93+ Field [] fields = ThreadPoolExecutor .class .getDeclaredFields ();
94+ boolean found = false ;
95+ for (Field field : fields ) {
96+ if (field .getName ().equals ("workQueue" )) {
97+ field .setAccessible (true );
98+ BlockingQueue <Runnable > workQueue = (BlockingQueue <Runnable >) field .get (executor );
99+ // there are still two task not cancel, can not cause to memory lack
100+ Assert .assertTrue ("ScheduledExecutor#workQueue should equals 2, now is " +
101+ workQueue .size () + ", please check region is close" , 2 == workQueue .size ());
102+ found = true ;
103+ }
104+ }
105+ Assert .assertTrue ("can not find workQueue, test failed" , found );
106+ }
107+
108+ }