In addition to Weibo, there is also WeChat
Please pay attention
WeChat public account
Shulou
2025-02-28 Update From: SLTechnology News&Howtos shulou NAV: SLTechnology News&Howtos > Database >
Share
Shulou(Shulou.com)06/01 Report--
HBase itself does not support paged queries. I pieced together a paging implementation from a lot of material found online, and I am recording it here to share with you — if anything is lacking, please feel free to point it out. Without further ado, here is the code.
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
Publicclass HBaseUtils {
Privatestatic Configuration config = null
Privatestatic HTablePool tp = null
Static {
/ / load cluster configuration
Config = HBaseConfiguration.create ()
Config.set ("hbase.zookeeper.quorum", "xx.xx.xx")
Config.set ("hbase.zookeeper.property.clientPort", "2181")
/ / create a table pool (can slightly improve query performance, please ask Baidu or official API for details)
Tp = new HTablePool (config, 10)
}
/ *
* get the table of hbase
, /
Publicstatic HTableInterface getTable (StringtableName) {
If (StringUtils.isEmpty (tableName))
Returnnull
Returntp.getTable (getBytes (tableName))
}
/ * convert byte array * /
Publicstaticbyte [] getBytes (String str) {
If (str = = null)
Str= ""
Return Bytes.toBytes (str)
}
/ * *
* query data
* @ param tableKey table identification
* @ param queryKey query identity
* @ param startRow start line
* @ param paramsMap parameter collection
* @ return result set
, /
Publicstatic TBData getDataMap (StringtableName, String startRow
StringstopRow, Integer currentPage, Integer pageSize)
Throws IOException {
ListmapList = null
MapList = new LinkedList ()
ResultScanner scanner = null
/ / the wrapper class object created for paging. The specific properties are given below.
TBData tbData = null
Try {
/ / get the maximum number of returned results
If (pageSize = = null | | pageSize = = 0L)
PageSize = 100
If (currentPage = = null | | currentPage = = 0)
CurrentPage = 1
/ / calculate the start and end pages
IntegerfirstPage = (currentPage-1) * pageSize
IntegerendPage = firstPage + pageSize
/ / retrieve HBASE table objects from the table pool
HTableInterfacetable = getTable (tableName)
/ / get the filter object
Scanscan = getScan (startRow, stopRow)
/ / put the filter into the filter (true identifies paging, the specific method is below)
Scan.setFilter (packageFilters (true))
/ / cache 1000 pieces of data
Scan.setCaching (1000)
Scan.setCacheBlocks (false)
Scanner= table.getScanner (scan)
Int I = 0
List rowList = new LinkedList ()
/ / traverse the scanner object and row key the data to be queried
For (Result result: scanner) {
String row = toStr (result.getRow ())
If (I > = firstPage & & I < endPage) {
RowList.add (getBytes (row))
}
ITunes +
}
/ / get the GET object of the extracted row key
ListgetList = getList (rowList)
Result [] results = table.get (getList)
/ / traversing the results
For (Result result: results) {
Map fmap = packFamilyMap (result)
Map rmap = packRowMap (fmap)
MapList.add (rmap)
}
/ / encapsulate the paging object
TbData= new TBData ()
TbData.setCurrentPage (currentPage)
TbData.setPageSize (pageSize)
TbData.setTotalCount (I)
TbData.setTotalPage (getTotalPage (pageSize, I))
TbData.setResultList (mapList)
} catch (IOException e) {
E.printStackTrace ()
} finally {
CloseScanner (scanner)
}
Return tbData
}
Privatestaticint getTotalPage (int pageSize, int totalCount) {
Int n = totalCount / pageSize
If (totalCount% pageSize = = 0) {
Return n
} else {
Return ((int) n) + 1
}
}
/ / get the scanner object
Privatestatic Scan getScan (String startRow,String stopRow) {
Scan scan = new Scan ()
Scan.setStartRow (getBytes (startRow))
Scan.setStopRow (getBytes (stopRow))
Return scan
}
/ * *
* encapsulate query conditions
, /
Privatestatic FilterList packageFilters (boolean isPage) {
FilterList filterList = null
/ / MUST_PASS_ALL (conditional AND) MUST_PASS_ONE (conditional OR)
FilterList = new FilterList (FilterList.Operator.MUST_PASS_ALL)
Filter filter1 = null
Filter filter2 = null
Filter1 = newFilter (getBytes ("family1"), getBytes ("column1")
CompareOp.EQUAL, getBytes ("condition1"))
Filter2 = newFilter (getBytes ("family2"), getBytes ("column1")
CompareOp.LESS, getBytes ("condition2"))
FilterList.addFilter (filter1)
FilterList.addFilter (filter2)
If (isPage) {
FilterList.addFilter (new FirstKeyOnlyFilter ())
}
Return filterList
}
Privatestatic Filter newFilter (byte [] f, byte [] c, CompareOp op, byte [] v) {
Returnnew SingleColumnValueFilter (f, c, op,v)
}
Privatestaticvoid closeScanner (ResultScannerscanner) {
If (scanner! = null)
Scanner.close ()
}
/ * *
* encapsulate each row of data
, /
Privatestatic MappackRowMap (Map dataMap) {
Map map = new LinkedHashMap ()
For (byte [] key: dataMap.keySet ()) {
Byte [] value = dataMap.get (key)
Map.put (toStr (key), toStr (value))
}
Return map
}
/ * get the collection of GET objects according to the ROW KEY collection * /
Privatestatic List getList (List rowList) {
List list = new LinkedList ()
For (byte [] row: rowList) {
Getget = new Get (row)
Get.addColumn (getBytes ("family1"), getBytes ("column1"))
Get.addColumn (getBytes ("family1"), getBytes ("column2"))
Get.addColumn (getBytes ("family2"), getBytes ("column1"))
List.add (get)
}
Return list
}
/ * *
* encapsulate all field column families of the configuration
, /
Privatestatic Map packFamilyMap (Result result) {
Map dataMap = null
DataMap = new LinkedHashMap ()
DataMap.putAll (result.getFamilyMap (getBytes ("family1")
DataMap.putAll (result.getFamilyMap (getBytes ("family2")
Return dataMap
}
Privatestatic String toStr (byte [] bt) {
Return Bytes.toString (bt)
}
Publicstaticvoid main (String [] args) throws IOException {
/ / take out the start and end lines of row key
/ / #
Welcome to subscribe "Shulou Technology Information" to get the latest news, interesting stories and hot topics in the IT industry, and to keep up with the hottest and latest Internet news, technology news and IT industry trends.
Views: 0
*The comments in the above article only represent the author's personal views and do not represent the views and positions of this website. If you have more insights, please feel free to contribute and share.
Continue with the installation of the previous hadoop.First, install zookooper1. Decompress zookoope
"Every 5-10 years, there's a rare product, a really special, very unusual product that's the most un
© 2024 shulou.com SLNews company. All rights reserved.