Skip to content

Instantly share code, notes, and snippets.

@mattshma
Last active December 29, 2016 08:21
Show Gist options
  • Select an option

  • Save mattshma/3f83aaf4aee39162543009e9a46b29f7 to your computer and use it in GitHub Desktop.

Select an option

Save mattshma/3f83aaf4aee39162543009e9a46b29f7 to your computer and use it in GitHub Desktop.
MultiThreadHBaseRest
package com.yz;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;
// hbase1.2 api docs: https://hbase.apache.org/1.2/apidocs/index.html
// CDH pom.xml: https://www.cloudera.com/documentation/enterprise/release-notes/topics/cdh_vd_cdh5_maven_repo.html
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Load-test driver: connects to an HBase REST gateway and launches 500
 * concurrent {@link ScanHbase} workers against the "test" table.
 */
public class HbaseRest {
    /** REST gateway host; overridable via the first command-line argument. */
    public static String restHost = "10.10.3.2";
    /** REST gateway port; overridable via the second command-line argument. */
    public static int restPort = 80;

    /**
     * Entry point.
     *
     * @param args optional overrides: {@code args[0]} = REST host,
     *             {@code args[1]} = REST port. Defaults are used when absent,
     *             so existing invocations keep working.
     * @throws IOException if the REST table handle cannot be created
     */
    public static void main(String[] args) throws IOException {
        // Allow host/port override without recompiling (backward compatible).
        if (args.length > 0) {
            restHost = args[0];
        }
        if (args.length > 1) {
            restPort = Integer.parseInt(args[1]);
        }
        Cluster cluster = new Cluster();
        cluster.add(restHost, restPort);
        Client client = new Client(cluster);
        String tableName = "test";
        RemoteHTable rt = new RemoteHTable(client, tableName);
        ExecutorService executor = Executors.newFixedThreadPool(500);
        for (int i = 0; i < 500; i++) {
            executor.execute(new ScanHbase(rt));
        }
        // Stop accepting new tasks. The submitted ScanHbase workers loop
        // forever by design, but the pool can now wind down cleanly if they
        // ever exit; the old code leaked a live ExecutorService.
        executor.shutdown();
    }
}
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.yz</groupId>
<artifactId>HBaseRestTest</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<name>HBaseRestTest</name>
<url>http://maven.apache.org</url>
<repositories>
<repository>
<id>cloudera</id>
<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>1.2.0-cdh5.7.0</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-rest</artifactId>
<version>1.2.0-cdh5.7.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<archive>
<manifest>
<!-- Must be the fully-qualified class containing main(); the previous
     value "com.yz.HbaseRest.Main" pointed at a nonexistent class and
     made the assembled jar unrunnable via java -jar. -->
<mainClass>com.yz.HbaseRest</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
</plugins>
</build>
</project>
package com.yz;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Bulk writer: 500 worker threads each write 10,000 rows keyed
 * "i-j-suffix" (i in [1000,1500)), with ten qualifiers cf:q1..cf:q10 per row,
 * batched through the HBase REST gateway.
 */
public class PutHbase {
    /** Number of puts buffered before each batch write to the gateway. */
    private static final int BATCH_SIZE = 10;
    private static final String suffix = "suffix";

    private RemoteHTable table = null;

    public PutHbase(RemoteHTable table) {
        this.table = table;
    }

    /**
     * Submits the write workload. Fixes over the previous version:
     * the old {@code j % 10 == 0} check flushed a one-element "batch" at
     * j == 0; the trailing partial batch (up to 9 puts per worker) was never
     * written at all; and the executor was never shut down, so the JVM could
     * not exit after the work finished.
     */
    public void putBatch() {
        ExecutorService executor = Executors.newFixedThreadPool(500);
        final RemoteHTable rtable = this.table;
        for (int i = 1000; i < 1500; i++) {
            final int temp = i;
            executor.execute(new Runnable() {
                public void run() {
                    List<Put> puts = new ArrayList<Put>(BATCH_SIZE);
                    for (int j = 0; j < 10000; j++) {
                        String baseRowKey = temp + "-" + j;
                        String rowKey = baseRowKey + "-" + suffix;
                        Put put = new Put(Bytes.toBytes(rowKey));
                        for (int k = 1; k < 11; k++) {
                            put.add(Bytes.toBytes("cf"), Bytes.toBytes("q" + k),
                                    Bytes.toBytes("value-q" + k + "-" + baseRowKey));
                        }
                        puts.add(put);
                        // Flush only once a full batch has accumulated.
                        if (puts.size() >= BATCH_SIZE) {
                            flush(rtable, puts);
                        }
                    }
                    // Flush the trailing partial batch (old code dropped it).
                    if (!puts.isEmpty()) {
                        flush(rtable, puts);
                    }
                }
            });
        }
        // Let already-submitted workers finish, then allow the pool threads
        // to terminate so the process can exit.
        executor.shutdown();
    }

    /**
     * Sends the buffered puts and clears the buffer. A failed batch is logged
     * and skipped (best-effort, matching the original error handling).
     */
    private static void flush(RemoteHTable rtable, List<Put> puts) {
        try {
            rtable.put(puts);
        } catch (IOException e) {
            e.printStackTrace();
        }
        puts.clear();
    }
}
package com.yz;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
 * Scan worker: endlessly issues small range scans (a 20-row key window,
 * qualifiers cf:q1..cf:q10) over random row keys of the form "x-y-suffix"
 * and prints the cf:q1 value of each returned row.
 */
public class ScanHbase implements Runnable {
    // Set each iteration but never read; kept for compatibility — appears to
    // be leftover diagnostic state. Candidate for removal once confirmed.
    ThreadLocal<Map<String, String>> threadLocal = new ThreadLocal<Map<String, String>>();
    private RemoteHTable table = null;
    private static final String suffix = "suffix";

    public ScanHbase(RemoteHTable table) {
        this.table = table;
    }

    public void run() {
        // One Random per worker thread, created once instead of every loop pass.
        Random random = new Random();
        while (true) {
            int x = 1 + random.nextInt(999);
            int y = random.nextInt(9980);
            String startRow = x + "-" + y + "-" + suffix;
            String stopRow = x + "-" + (y + 20) + "-" + suffix;
            Map<String, String> map = new HashMap<String, String>();
            map.put("startRow", startRow);
            map.put("stopRow", stopRow);
            threadLocal.set(map);
            Scan scan = new Scan(Bytes.toBytes(startRow), Bytes.toBytes(stopRow));
            for (int i = 1; i < 11; i++) {
                scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q" + i));
            }
            ResultScanner rs = null;
            try {
                rs = this.table.getScanner(scan);
                for (Result r : rs) {
                    byte[] value = r.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q1"));
                    System.out.println(Bytes.toString(value));
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Always release the scanner; the old code leaked one REST
                // scanner per iteration of this infinite loop.
                if (rs != null) {
                    rs.close();
                }
            }
        }
    }
}
@mattshma
Copy link
Author

generate jar: mvn assembly:assembly
run jar: java -cp target/HBaseRestTest-1.0-SNAPSHOT-jar-with-dependencies.jar com.yz.HbaseRest

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment