hbase.DoNotRetryIOException when reading through Phoenix?

HBase version: 1.1.3, Phoenix version: 4.7.0

After inserting data, I was initially able to read it back from HBase through Phoenix.

After restarting the cluster, the following error appears:

Sqlline log

0: jdbc:phoenix:localhost> select count(*) from PRICEDATA;
    16/06/01 12:39:39 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
    org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: PRICEDATA: at index 10
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:87)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:484)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11705)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7606)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1890)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1872)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
    Caused by: java.lang.NullPointerException: at index 10
        at com.google.common.collect.ImmutableList.checkElementNotNull(ImmutableList.java:311)
        at com.google.common.collect.ImmutableList.construct(ImmutableList.java:302)
        at com.google.common.collect.ImmutableList.copyOf(ImmutableList.java:278)
        at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:424)
        at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:315)
        at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:303)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:883)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:501)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2481)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2426)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:451)
        ... 10 more

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:284)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1611)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:93)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:117)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:93)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:96)
        at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:57)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:7891)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1271)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
        at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1608)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
    Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: PRICEDATA: at index 10
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:87)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:484)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11705)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7606)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1890)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1872)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
    Caused by: java.lang.NullPointerException: at index 10
        at com.google.common.collect.ImmutableList.checkElementNotNull(ImmutableList.java:311)
        at com.google.common.collect.ImmutableList.construct(ImmutableList.java:302)
        at com.google.common.collect.ImmutableList.copyOf(ImmutableList.java:278)
        at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:424)
        at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:315)
        at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:303)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:883)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:501)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2481)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2426)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:451)
        ... 10 more

        at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1457)
        at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1661)
        at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1719)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:30411)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1607)
        ... 14 more
    16/06/01 12:39:39 WARN client.HTable: Error calling coprocessor service org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for row \x00\x00PRICEDATA
    java.util.concurrent.ExecutionException: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: PRICEDATA: at index 10
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:87)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:484)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11705)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7606)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1890)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1872)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
    Caused by: java.lang.NullPointerException: at index 10
        at com.google.common.collect.ImmutableList.checkElementNotNull(ImmutableList.java:311)
        at com.google.common.collect.ImmutableList.construct(ImmutableList.java:302)
        at com.google.common.collect.ImmutableList.copyOf(ImmutableList.java:278)
        at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:424)
        at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:315)
        at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:303)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:883)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:501)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2481)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2426)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:451)
        ... 10 more

        at java.util.concurrent.FutureTask.report(FutureTask.java:122)
        at java.util.concurrent.FutureTask.get(FutureTask.java:188)
        at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1620)
        at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1577)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1257)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:350)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:311)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:307)
        at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:333)
        at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:237)
        at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:160)
        at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:340)
        at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:330)
        at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:240)
        at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:235)
        at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:234)
        at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1100)
        at sqlline.Commands.execute(Commands.java:822)
        at sqlline.Commands.sql(Commands.java:732)
        at sqlline.SqlLine.dispatch(SqlLine.java:808)
        at sqlline.SqlLine.begin(SqlLine.java:681)
        at sqlline.SqlLine.start(SqlLine.java:398)
        at sqlline.SqlLine.main(SqlLine.java:292)
    Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: PRICEDATA: at index 10
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:87)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:484)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11705)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7606)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1890)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1872)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
    Caused by: java.lang.NullPointerException: at index 10
        at com.google.common.collect.ImmutableList.checkElementNotNull(ImmutableList.java:311)
        at com.google.common.collect.ImmutableList.construct(ImmutableList.java:302)
        at com.google.common.collect.ImmutableList.copyOf(ImmutableList.java:278)
        at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:424)
        at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:315)
        at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:303)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:883)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:501)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2481)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2426)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:451)
        ... 10 more

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:284)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1611)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:93)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:117)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:93)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:96)
        at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:57)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:7891)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1271)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1258)
        at org.apache.hadoop.hbase.client.HTable$17.call(HTable.java:1608)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)

HBase region server log

2016-06-01 13:07:47,467 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201] zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x574b597 connecting to ZooKeeper ensemble=HMECL001076:2181
2016-06-01 13:07:47,467 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201] zookeeper.ZooKeeper: Initiating client connection, connectString=HMECL001076:2181 sessionTimeout=90000 watcher=hconnection-0x574b5970x0, quorum=HMECL001076:2181, baseZNode=/hbase
2016-06-01 13:07:47,468 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201-SendThread(HMECL001076:2181)] zookeeper.ClientCnxn: Opening socket connection to server HMECL001076/127.0.1.1:2181. Will not attempt to authenticate using SASL (unknown error)
2016-06-01 13:07:47,470 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201-SendThread(HMECL001076:2181)] zookeeper.ClientCnxn: Socket connection established to HMECL001076/127.0.1.1:2181, initiating session
2016-06-01 13:07:47,475 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201-SendThread(HMECL001076:2181)] zookeeper.ClientCnxn: Session establishment complete on server HMECL001076/127.0.1.1:2181, sessionid = 0x1550abc32310033, negotiated timeout = 40000
2016-06-01 13:07:47,481 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201] client.ConnectionManager$HConnectionImplementation: Closing zookeeper sessionid=0x1550abc32310033
2016-06-01 13:07:47,483 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201] zookeeper.ZooKeeper: Session: 0x1550abc32310033 closed
2016-06-01 13:07:47,483 INFO  [B.defaultRpcServer.handler=6,queue=0,port=16201-EventThread] zookeeper.ClientCnxn: EventThread shut down
2016-06-01 13:07:47,485 ERROR [B.defaultRpcServer.handler=6,queue=0,port=16201] coprocessor.MetaDataEndpointImpl: getTable failed
java.lang.NullPointerException: at index 10
    at com.google.common.collect.ImmutableList.checkElementNotNull(ImmutableList.java:311)
    at com.google.common.collect.ImmutableList.construct(ImmutableList.java:302)
    at com.google.common.collect.ImmutableList.copyOf(ImmutableList.java:278)
    at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:424)
    at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:315)
    at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:303)
    at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:883)
    at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:501)
    at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2481)
    at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2426)
    at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:451)
    at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11705)
    at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7606)
    at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1890)
    at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1872)
    at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
    at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
    at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
    at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
    at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
    at java.lang.Thread.run(Thread.java:745)

I tried scanning the HBase table manually and was able to retrieve the data through the HBase shell (see the sketch below), but through the Phoenix API I can neither write nor read data. This error occurred once before, and restarting the nodes fixed it; this time that does not help. Only some tables are affected, even though all the tables have almost identical schemas.
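For reference, a raw scan in the HBase shell along these lines (the LIMIT is only there to keep the output short) returns rows without any error, which points at Phoenix's metadata lookup rather than the stored data itself:

    hbase(main):001:0> scan 'PRICEDATA', {LIMIT => 2}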

Table creation statement

CREATE TABLE IF NOT EXISTS Pricedata (
    NUM_11 DOUBLE,
    D81 VARCHAR,
    D83 DOUBLE,
    D82 VARCHAR,
    D77 VARCHAR NOT NULL PRIMARY KEY,
    NUM_9 DOUBLE,
    D80 VARCHAR,
    D79 BIGINT,
    D78 BIGINT,
    NUM_10 DOUBLE);
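For completeness, the data was written and read back with plain Phoenix statements of this shape (the values below are made up for illustration; D77 is the primary key):

    UPSERT INTO Pricedata (D77, D78, NUM_11) VALUES ('row-1', 1464764400000, 42.5);
    SELECT COUNT(*) FROM Pricedata;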

— Vishnu667, 01.06.2016


Answers (2)


Something was probably corrupted or did not get updated properly. Use the HBase shell to disable and drop Phoenix's SYSTEM.CATALOG table; Phoenix regenerates its system tables on initialization. Back up your cluster first ;)
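A minimal sketch of that step in the HBase shell, assuming the default system table name (this destroys all Phoenix metadata, hence the backup):

    hbase(main):001:0> disable 'SYSTEM.CATALOG'
    hbase(main):002:0> drop 'SYSTEM.CATALOG'

The next Phoenix connection (for example, opening sqlline again) should then recreate the empty system tables.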

Then run your CREATE TABLE statement again to restore the table metadata.

— kliew, 05.06.2016
comment
I disabled and dropped the catalog tables, but Phoenix did not regenerate them even after restarting the node. — Vishnu667, 08.06.2016
comment
It should, provided it manages to start up successfully. I take it that is still not happening? — kliew, 09.06.2016

After deleting the data in the SYSTEM.CATALOG table, you need to re-run your Phoenix CREATE TABLE statements. This is what repopulates the metadata in the catalog table.
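If the original DDL is kept in a script, one way to replay it in bulk (the file path and ZooKeeper quorum here are placeholders) is to pass it to sqlline:

    sqlline.py localhost /path/to/create_tables.sql

Since the DDL above uses CREATE TABLE IF NOT EXISTS, re-running it should map the Phoenix schema back onto the existing HBase tables and repopulate SYSTEM.CATALOG without touching the row data.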

— dev, 13.08.2016