大数据架构-使用HBase和Solr将存储与索引放在不同的机器上
摘要:HBase可以通过协处理器Coprocessor的方式向Solr发出请求,Solr对于接收到的数据可以做相关的同步:增、删、改索引的操作,这样就可以同时使用HBase存储量大和Solr检索性能高的优点了,更何况HBase和Solr都可以集群。这对海量数据存储、检索提供了一种方式,将存储与索引放在不同的机器上,是大数据架构的必须品。 关键词:HBase,Solr,Coprocessor,大数据,架构

/*
 * Copyright: Wang Anqi
 * Description: Observes HBase; whenever a Put lands (postPut) the row is
 *              forwarded to Solr for indexing. This class is registered on
 *              HBase as a region coprocessor (trigger).
 * Modified: 2014-05-27
 * Change: initial version
 */
package solrHbase.test;

import java.io.UnsupportedEncodingException;
import ***;

public class SorlIndexCoprocessorObserver extends BaseRegionObserver {

    private static final Logger LOG = LoggerFactory
            .getLogger(SorlIndexCoprocessorObserver.class);
    private static final String solrUrl = "http://192.1.11.108:80/solr/core1";
    // Asynchronous batching Solr client: internal queue of 10000 docs,
    // 20 background sender threads.
    private static final SolrServer solrServer = new ConcurrentUpdateSolrServer(
            solrUrl, 10000, 20);

    /**
     * Builds the Solr index. Invoked by HBase after every Put applied to the
     * observed region; the Put is relayed to Solr as an index document.
     *
     * @throws UnsupportedEncodingException declared for the row/column decoding
     */
    @Override
    public void postPut(final ObserverContext<RegionCoprocessorEnvironment> e,
            final Put put, final WALEdit edit, final boolean writeToWAL)
            throws UnsupportedEncodingException {
        inputSolr(put);
    }

    /**
     * Converts the Put into a SolrInputDocument and queues it for indexing.
     * Failures are logged but never propagated, so indexing problems cannot
     * break the HBase write path.
     */
    public void inputSolr(Put put) {
        try {
            solrServer.add(TestSolrMain.getInputDoc(put));
        } catch (Exception ex) {
            // Fix: log the throwable itself, not just getMessage(),
            // so the stack trace is preserved.
            LOG.error("Failed to index Put into Solr", ex);
        }
    }
}
publicstaticSolrInputDocumentgetInputDoc(Putput){ SolrInputDocumentdoc=newSolrInputDocument(); doc.addField("test_ID",Bytes.toString(put.getRow())); for(KeyValuec:put.getFamilyMap().get(Bytes.toBytes(columnFamily))){ Stringkey=Bytes.toString(c.getKey()); Stringvalue=Bytes.toString(c.getValue()); if(value.isEmpty()){ continue; } StringfieldName=key.substring(key.indexOf(columnFamily)+3, key.indexOf("")).trim(); doc.addField(fieldName,value); } returndoc; } |
/*
 * Copyright: Wang Anqi
 * Description: Tests HbaseInsert / HBase write throughput.
 * Modified: 2014-05-27
 * Change: initial version
 */
package solrHbase.test;

import hbaseInput.HbaseInsert;
import ***;

public class TestHBaseMain {

    private static Configuration config;
    private static String tableName = "angelHbase";
    private static HTable table = null;
    private static final String columnFamily = "wanganqi";

    /**
     * Creates the test table, then starts a writer thread that inserts
     * batches of 1000 Puts in a loop, printing a timestamped progress line
     * after each batch.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", "192.103.101.104");
        HbaseInsert.createTable(config, tableName, columnFamily);
        try {
            table = new HTable(config, Bytes.toBytes(tableName));
            for (int k = 0; k < 1; k++) {
                Thread t = new Thread() {
                    public void run() {
                        for (int i = 0; i < 100000; i++) {
                            HbaseInsert.inputData(table,
                                    PutCreater.createPuts(1000, columnFamily));
                            Calendar c = Calendar.getInstance();
                            // Fix: Calendar.MONTH is 0-based (January == 0),
                            // so +1 is required for a human-readable month.
                            // Fix: HOUR is the 12-hour-clock field; use
                            // HOUR_OF_DAY for 0-23.
                            String dateTime = c.get(Calendar.YEAR) + "-"
                                    + (c.get(Calendar.MONTH) + 1) + "-"
                                    + c.get(Calendar.DATE) + "T"
                                    + c.get(Calendar.HOUR_OF_DAY) + ":"
                                    + c.get(Calendar.MINUTE) + ":"
                                    + c.get(Calendar.SECOND) + ":"
                                    + c.get(Calendar.MILLISECOND) + "Z写入:"
                                    + i * 1000;
                            System.out.println(dateTime);
                        }
                    }
                };
                t.start();
            }
        } catch (IOException e1) {
            e1.printStackTrace();
        }
    }
}
/*
 * Copyright: Wang Anqi
 * Description: HBase helper operations — table creation and data insertion.
 * Modified: 2014-05-27
 * Change: initial version
 */
package hbaseInput;

import ***;
import org.apache.hadoop.hbase.client.Put;

public class HbaseInsert {

    /**
     * Creates the table with a single column family, doing nothing if the
     * table already exists. Connection errors are logged and swallowed.
     */
    public static void createTable(Configuration config, String tableName,
            String columnFamily) {
        HBaseAdmin hBaseAdmin = null;
        try {
            hBaseAdmin = new HBaseAdmin(config);
            if (hBaseAdmin.tableExists(tableName)) {
                // Fix: the original returned here without closing the admin,
                // leaking its connection; the finally block now closes it.
                return;
            }
            HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
            tableDescriptor.addFamily(new HColumnDescriptor(columnFamily));
            hBaseAdmin.createTable(tableDescriptor);
        } catch (MasterNotRunningException e) {
            e.printStackTrace();
        } catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close on every exit path (early return, success, exception).
            if (hBaseAdmin != null) {
                try {
                    hBaseAdmin.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Writes the batch of Puts, flushes the client-side buffer, and clears
     * the caller's list so it can be refilled for the next batch.
     */
    public static void inputData(HTable table, ArrayList<Put> puts) {
        try {
            table.put(puts);
            table.flushCommits();
            puts.clear();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
publicstaticPutcreatePut(StringcolumnFamily){ Stringss=getSentence(); byte[]family=Bytes.toBytes(columnFamily); byte[]rowKey=Bytes.toBytes(""+Math.abs(r.nextLong())); Putput=newPut(rowKey); put.add(family,Bytes.toBytes("DeviceID"), Bytes.toBytes(""+Math.abs(r.nextInt()))); ****** put.add(family,Bytes.toBytes("Company_mmsegsm"),Bytes.toBytes("ss")); returnput; } |
privatestaticvoidsendConcurrentUpdateSolrServer(finalStringurl, finalintcount)throwsSolrServerException,IOException{ SolrServersolrServer=newConcurrentUpdateSolrServer(url,10000,20); for(inti=0;i<count;i++){ solrServer.add(getInputDoc(PutCreater.createPut(columnFamily))); } } |