hive-orc-writer:在Windows上运行时 writer.close() 抛出空指针异常(NullPointerException)



我正在创建ORC文件并向该文件添加行。程序在Linux上正常运行,但在Windows上调用 writer.close() 时抛出NPE(空指针异常)。请查看下面的代码和堆栈跟踪,并在这方面给我帮助。代码:-- package com.testing;

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcFile.WriterOptions;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcFile.WriterOptions;
import org.apache.hadoop.hive.ql.io.orc.Writer;

public class Typical {

    /**
     * Writes a single-row ORC file (schema {@code struct<a:string>}) to the
     * local file system and closes the writer.
     *
     * <p>NOTE(review): on Windows, Hadoop's {@code RawLocalFileSystem} shells
     * out to {@code chmod} when creating the file; without {@code winutils.exe}
     * this surfaces as an NPE from {@code ProcessBuilder.start()} inside
     * {@code writer.close()} (which is where the first stripe is flushed).
     * The standard workaround is to set {@code hadoop.home.dir} to a directory
     * whose {@code bin} subfolder contains {@code winutils.exe}.
     */
    public static void main(String args[]) {
        // Workaround for the Windows NPE: point Hadoop at a winutils install.
        // winutils.exe must actually live at %hadoop.home.dir%\bin\winutils.exe.
        if (System.getProperty("os.name", "").toLowerCase().contains("win")
                && System.getProperty("hadoop.home.dir") == null) {
            System.setProperty("hadoop.home.dir", "C:/hadoop"); // TODO: adjust to real path
        }

        String filePath = "C:/usr/tmp/EDMS_FILE_ARCHIVE_.orc";
        TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<a:string>");
        ObjectInspector inspector =
                TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
        WriterOptions options = OrcFile.writerOptions(new Configuration()).inspector(inspector);

        Writer writer = null;
        try {
            writer = OrcFile.createWriter(new Path(filePath), options);
            writer.addRow(Arrays.asList("hello"));
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        } finally {
            // Close in finally so the writer is released even when addRow fails;
            // close() flushes the stripe and writes the ORC footer.
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}

堆栈跟踪(stack trace):--
Exception in thread "main" java.lang.NullPointerException
    at java.lang.ProcessBuilder.start(Unknown Source)
    at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
    at org.apache.hadoop.util.Shell.run(Shell.java:455)
    at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
    at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:656)
    at org.apache.hadoop.fs.FilterFileSystem.setPermission(FilterFileSystem.java:490)
    at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:462)
    at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:428)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:908)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:889)
    at org.apache.hadoop.hive.ql.io.orc.WriterImpl.getStream(WriterImpl.java:1967)
    at org.apache.hadoop.hive.ql.io.orc.WriterImpl.flushStripe(WriterImpl.java:1984)
    at org.apache.hadoop.hive.ql.io.orc.WriterImpl.close(WriterImpl.java:2289)
    at com.testing.Typical.main(Typical.java:30)

Advance Thanks
Hanuman

在windows上使用orc库并写入本地文件时出现问题:

java.lang.NullPointerException
at java.lang.ProcessBuilder.start(Unknown Source)

程序试图执行chmod命令,但该命令在Windows中不存在——因此在这里得到NPE。我找到了一个(我并不喜欢的)变通方法:

  • 下载 winutils.exe
  • 设置 System.setProperty("hadoop.home.dir", "c:\\path\\to\\winutils");注意 winutils.exe 的实际路径应该是 c:\path\to\winutils\bin\winutils.exe